def create_monthly_intvar(run_type, ref_start, ref_end, n_pcs=22, run_n=400):
    # load in the PCs and EOFs
    histo_sy = 1899
    histo_ey = 2010
    
#    monthly_pc_fname = get_HadISST_monthly_residual_PCs_fname(histo_sy, histo_ey, run_n)
#    monthly_pcs = load_data(monthly_pc_fname)
#    monthly_eof_fname = get_HadISST_monthly_residual_EOFs_fname(histo_sy, histo_ey, run_n)
#    monthly_eofs = load_sst_data(monthly_eof_fname, "sst")

    monthly_residuals_fname = get_HadISST_monthly_residuals_fname(histo_sy, histo_ey, run_n)
    # open netcdf_file
    fh = netcdf_file(monthly_residuals_fname, 'r')
    attrs = fh.variables["sst"]._attributes
    mv = attrs["_FillValue"]
    var = fh.variables["sst"]
    monthly_residuals = numpy.ma.masked_equal(var[:], mv)

    # weights for reconstruction / projection
    coslat = numpy.cos(numpy.deg2rad(numpy.arange(89.5, -90.5, -1))).clip(0., 1.)
    wgts = numpy.sqrt(coslat)[..., numpy.newaxis]
    eof_solver = Eof(monthly_residuals, center=False, weights=wgts)
    monthly_pcs = eof_solver.pcs(npcs=n_pcs)
    monthly_eofs = eof_solver.eofs(neofs=n_pcs)
    
    # get the explanation of variance and calculate the scalar from it
    M = 1.0 / numpy.sum(eof_solver.varianceFraction(neigs=n_pcs))
    
    # get the number of months to predict the PCs for and create the storage
    histo_sy, histo_ey, rcp_sy, rcp_ey = get_start_end_periods()
    n_mnths = 12*(rcp_ey - histo_sy)
    predicted_pcs = numpy.zeros([n_mnths+12, n_pcs], "f")

    # fit an AR process to the first ~20 pcs
    for pc in range(0, n_pcs):
        # create the model
        arn = ARN(monthly_pcs[:,pc].squeeze())
        # fit the model to the data
        res = arn.fit()
        arp = res.k_ar
        # create a timeseries of predicted values
        predicted_pcs[:,pc] = M*arn.predict(res.params, noise='all', dynamic=True, start=arp, end=n_mnths+arp+11)

    # reconstruct the field from the predicted PCs and return
    monthly_intvar = reconstruct_field(predicted_pcs, monthly_eofs[:n_pcs], n_pcs, wgts)
    return monthly_intvar
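# Hedged sketch of the AR fit/predict step above, using statsmodels' AutoReg as
# a stand-in for the project-specific ARN wrapper (names and arguments differ;
# unlike ARN with noise='all', plain predict() gives a deterministic forecast
# without simulated noise). Synthetic data, purely illustrative.
import numpy as np
from statsmodels.tsa.ar_model import AutoReg
pc_series = np.random.randn(1200)                  # stand-in for monthly_pcs[:, 0]
ar_res = AutoReg(pc_series, lags=6).fit()
forecast = ar_res.predict(start=len(pc_series), end=len(pc_series) + 119)
print(forecast.shape)                              # 120 months ahead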
Example #2
def smooth(variable, window) :
    from axis import Axes, TimeAxis
    from variable import Variable
    if len(variable.shape) > 1 :
        raise NotImplementedError("smooth only handles 1D (time) variables")
    try :
        variable.dts
    except AttributeError :
        raise NotImplementedError("variable must have a time axis (dts)")
    if window%2 == 0 :
        raise NotImplementedError("window length must be odd")
    # boxcar weights that sum to one
    mask = np.ones(window)
    mask /= window*1.0
    # 'valid' convolution shortens the series by window-1 points; trim the time axis to match
    newAxes = Axes()
    newAxes['time'] = TimeAxis(variable.dts[int(window/2):-int(window/2)])
    return Variable(
            data = np.convolve(variable.data, mask, mode='valid'),
            axes = newAxes,
            metadata = variable.metadata)
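# A minimal numpy-only sketch of what smooth() computes: a centred boxcar
# moving average via np.convolve in 'valid' mode, which drops window//2 points
# at each end (synthetic data, purely illustrative).
import numpy as np
x = np.arange(10, dtype=float)
weights = np.ones(3) / 3.0
print(np.convolve(x, weights, mode='valid'))   # [1. 2. 3. 4. 5. 6. 7. 8.]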
def calc_EOF2D(anom, nplat, coslat, varcode):

    # apply sqrt cos latitude weighting
    wgts = np.sqrt(coslat)
    wgts = wgts[:, np.newaxis]

    # leading EOF
    solver = Eof(anom, weights=wgts)
    eof1 = solver.eofs(neofs=1, eofscaling=0)[0]
    if varcode == 'PSL':
        if eof1[np.where(nplat >= 68)[0][0], 0] > 0:  # PSL
            eof1 = -eof1
    elif varcode == 'Z3':
        if eof1[np.where(nplat >= 75)[0][0], 0] > 0:  # Z3
            eof1 = -eof1
    elif varcode == 'U':
        if eof1[np.where(nplat >= 60)[0][0], 0] < 0:  # U
            eof1 = -eof1

    # leading principal component
    PC1 = np.empty([anom.shape[0]])
    for itime in range(anom.shape[0]):
        PC1[itime] = np.dot(anom[itime, :, :].flatten(), eof1.flatten())

    return (eof1, PC1)
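# Hedged usage sketch for calc_EOF2D on synthetic (time, lat, lon) anomalies;
# 'PSL' selects the sign convention that flips the pattern if the high-latitude
# centre of action is positive. Assumes Eof is imported from eofs.standard.
import numpy as np
from eofs.standard import Eof
nplat = np.arange(20., 90.)                        # NH latitudes
coslat = np.cos(np.deg2rad(nplat))
anom = np.random.randn(120, nplat.size, 144)       # 120 months, 144 longitudes
eof1, PC1 = calc_EOF2D(anom, nplat, coslat, 'PSL')
print(eof1.shape, PC1.shape)                       # (70, 144) (120,)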
Example #4
def calculate_EAsia_rm_eofs(data,
                            lats,
                            lons,
                            lat_min=20,
                            lat_max=50,
                            lon_min=110,
                            lon_max=180):
    """ Calculates EOFs over the East Asian region.
    Regresses the principal components back onto the original data"""
    lat_mask = (lats >= lat_min) & (lats <= lat_max)
    lon_mask = (lons >= lon_min) & (lons <= lon_max)
    data_EAsia = data[:, lat_mask, :][:, :, lon_mask]
    # calculate EOFs
    coslat = np.cos(np.deg2rad(lats[lat_mask]))
    wgts = np.sqrt(coslat)[..., np.newaxis]
    solver = Eof(data_EAsia, weights=wgts)
    var_frac = solver.varianceFraction()
    pcs = solver.pcs(npcs=3, pcscaling=1)
    # regress first modes onto original data
    reg_pc1, pval_pc1 = regress_map.regress_map(pcs[:, 0],
                                                data,
                                                map_type='regress')
    reg_pc2, pval_pc2 = regress_map.regress_map(pcs[:, 1],
                                                data,
                                                map_type='regress')
    reg_pc3, pval_pc3 = regress_map.regress_map(pcs[:, 2],
                                                data,
                                                map_type='regress')
    return var_frac, reg_pc1, pval_pc1, reg_pc2, pval_pc2, reg_pc3, pval_pc3
Example #5
    def compute_ipo(sst_anoms, years_pass=11, N=2.0):
        high = int(years_pass * 12.)  # np.int is removed in modern NumPy
        B, A = signal.butter(N, N / high, btype='lowpass', output='ba')

        def filter_SST(x):
            if any(np.isnan(x)):
                z = x
            else:
                z = signal.filtfilt(B, A, x)
            return z

        sst_anoms['sst_filtered'] = (('time', 'lat', 'lon'),
                                     np.apply_along_axis(
                                         filter_SST, 0,
                                         sst_anoms['sst_masked'].data))

        lat = sst_anoms['lat'].values
        lon = sst_anoms['lon'].values
        lons, lats = np.meshgrid(lon, lat)
        coslat = np.cos(np.deg2rad(lat))
        wgts = np.sqrt(coslat)[..., np.newaxis]
        sst_anoms.load()
        X = sst_anoms['sst_filtered'].data
        solver = Eof(X, weights=wgts)
        eofs = solver.eofsAsCorrelation(neofs=5)
        pcs = solver.pcs(npcs=5, pcscaling=1)
        PCs = pd.DataFrame(pcs, index=sst_anoms['time'].to_index())
        PCs_monthly = solver.projectField(sst_anoms['sst_masked'].data, neofs=5)
        PCs_monthly = pd.DataFrame(PCs_monthly,
                                   index=sst_anoms['time'].to_index())
        return eofs, PCs, lons, lats, PCs_monthly
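# Hedged sketch of calling compute_ipo (defined above as a nested function) on
# a synthetic xarray Dataset; the variable name 'sst_masked' and the grid are
# only illustrative.
import numpy as np
import pandas as pd
import xarray as xr
time = pd.date_range('1950-01-01', periods=360, freq='MS')
lat = np.arange(-60., 61., 5.)
lon = np.arange(120., 281., 5.)
ds = xr.Dataset(
    {'sst_masked': (('time', 'lat', 'lon'),
                    np.random.randn(time.size, lat.size, lon.size))},
    coords={'time': time, 'lat': lat, 'lon': lon})
eofs_, PCs, lons, lats, PCs_monthly = compute_ipo(ds)
print(PCs.shape)   # (360, 5) DataFrame of low-pass-filtered PCs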
def calc_HadISST_residual_EOFs(histo_sy, histo_ey, run_n):
    # load the already calculated residuals
    resid_fname = get_HadISST_residuals_fname(histo_sy, histo_ey, run_n)
    # open netcdf_file
    fh = netcdf_file(resid_fname, 'r')
    lats_var = fh.variables["latitude"]
    lons_var = fh.variables["longitude"]
    attrs = fh.variables["sst"]._attributes
    mv = attrs["_FillValue"]
    var = fh.variables["sst"]
    sst_data = numpy.ma.masked_equal(var[:], mv)

    # calculate the EOFs and PCs
    # take the eofs
    coslat = numpy.cos(numpy.deg2rad(lats_var[:])).clip(0., 1.)
    wgts = numpy.sqrt(coslat)[..., numpy.newaxis]
    eof_solver = Eof(sst_data, center=False, weights=wgts)
    pcs = eof_solver.pcs(npcs=None)
    eofs = eof_solver.eofs(neofs=None)

    # get the output names
    out_eofs_fname = get_HadISST_residual_EOFs_fname(histo_sy, histo_ey, run_n)
    out_pcs_fname  = get_HadISST_residual_PCs_fname(histo_sy, histo_ey, run_n)
    
    # save the eofs and pcs
    save_3d_file(out_eofs_fname, eofs, attrs, lats_var, lons_var)
    save_pcs(out_pcs_fname, pcs, attrs)
    fh.close()
Example #7
def eof(variable):
    from eofs.standard import Eof
    # square root of cos(latitude) weights, so the covariance is area-weighted
    wgts = np.cos(variable.lats*np.pi/180)**0.5
    solver = Eof(variable.data, weights = wgts[:, None])
    eof1 = solver.eofs(eofscaling=2, neofs=1)
    print(solver.varianceFraction(neigs=1)[0]*100, '%')
    output = variable[0].empty()
    output.data = eof1[0]
    return output
Example #8
 def eof(self, no_of_eofs=1):
     data_mean = self._data.mean(axis=0)
     coslat = np.cos(np.deg2rad(self._lat)).clip(0., 1.)  #weighting
     wgts = np.sqrt(coslat)[..., np.newaxis]
     solver = Eof(self._data, weights=wgts)
     # Retrieve the leading EOF, expressed as the covariance between the leading PC
     # time series and the input SLP anomalies at each grid point.
     eof = solver.eofsAsCorrelation(neofs=no_of_eofs)
     fraction = solver.varianceFraction(no_of_eofs)
     return Eof_pattern(eof, fraction, self._lat, self._lon)
def reconstruct_data(arr, neofs=16):
    if isinstance(neofs, int):
        neofs = [neofs]

    solver = Eof(arr, center=False)
    for n in neofs: 
        reconstructed = solver.reconstructedField(neofs=n)
        pcs = solver.pcs(npcs=n)
        eofs = solver.eofs(neofs=n)
        yield reconstructed, pcs, eofs
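# A small usage sketch for the reconstruct_data generator above (synthetic
# (time, space) array; values illustrative): the RMS residual should shrink
# as more EOFs are retained.
import numpy as np
arr = np.random.randn(100, 50)
for recon, pcs, eofs in reconstruct_data(arr, neofs=[2, 4, 8]):
    print(pcs.shape[1], 'EOFs, RMS residual:',
          np.sqrt(((arr - recon) ** 2).mean()))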
Example #12
    def eof(in_bands):
        data = np.array([in_bands[i].data for i in range(len(in_bands))])

        #take eof over time dimension
        solver = Eof(data)

        eof1 = solver.eofs(neofs=1)[0, :]
        cube = in_bands[0].copy()
        cube.data = eof1

        pc1 = solver.pcs(pcscaling=1, npcs=1)[:, 0]
        var_frac = solver.varianceFraction(neigs=1)[0]
        return cube, pc1, var_frac
Example #13
def get_EOF(data, order=1, mode='corr'):
    '''
    :param data: image data, sst or t300, [month, lon, lat]
    :param order: int, number of modes to return
    :param mode: 'corr' (EOFs as correlation), 'cova' (EOFs as covariance)
                 or 'pc' (principal components)
    :return: eof_corr or eof_cova [order, lon, lat], or pc [month, order]
    '''
    solver = Eof(data)
    if mode == 'corr':
        res = solver.eofsAsCorrelation(neofs=order)
    elif mode == 'cova':
        res = solver.eofsAsCovariance(neofs=order)
    elif mode == 'pc':
        res = solver.pcs(npcs=order, pcscaling=1)
    else:
        raise ValueError("mode must be 'corr', 'cova' or 'pc'")
    return res
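# Hedged usage sketch for get_EOF on synthetic data laid out as the docstring
# describes, [month, lon, lat]:
import numpy as np
data = np.random.randn(240, 72, 36)
pc = get_EOF(data, order=2, mode='pc')
print(pc.shape)   # (240, 2)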
def calculate_U_EOF(U,
                    SST,
                    THF,
                    lats_ua,
                    lons_ua,
                    lats_SST,
                    lons_SST,
                    lats_THF,
                    lons_THF,
                    lat_min=lat_min,
                    lat_max=lat_max,
                    lon_min=lon_min,
                    lon_max=lon_max,
                    npcs=3):
    """Function to select a given region and return the first few principal component time series
    then regress the pcs back onto the zonal wind and SST."""
    # select region
    lat_mask = (lats_ua >= lat_min) & (lats_ua <= lat_max)
    lon_mask = (lons_ua >= lon_min) & (lons_ua <= lon_max)
    #print(lats.shape,lons.shape,U.shape,lats[lat_mask].shape,lons[lon_mask].shape)
    U_region = U[:, lat_mask, :][:, :, lon_mask]
    U_climatology = np.mean(U, axis=0)

    # Calculate EOFs
    coslat = np.cos(np.deg2rad(lats_ua[lat_mask]))
    wgts = np.sqrt(coslat)[..., np.newaxis]
    solver = Eof(U_region, weights=wgts)
    pcs = solver.pcs(npcs=npcs, pcscaling=1)
    variance_fraction = solver.varianceFraction()

    # perform regressions
    regress_U = np.zeros([npcs, lats_ua.shape[0], lons_ua.shape[0]])
    regress_SST = np.zeros([npcs, lats_SST.shape[0], lons_SST.shape[0]])
    regress_THF = np.zeros([npcs, lats_THF.shape[0], lons_THF.shape[0]])
    for pc_number in np.arange(npcs):
        regress_U[pc_number, :, :] = regress_map(pcs[:, pc_number],
                                                 U,
                                                 map_type='corr')[0]
        regress_SST[pc_number, :, :] = regress_map(pcs[:, pc_number],
                                                   SST,
                                                   map_type='corr')[0]
        regress_THF[pc_number, :, :] = regress_map(pcs[:, pc_number],
                                                   THF,
                                                   map_type='corr')[0]

    return (pcs, regress_U, regress_SST, regress_THF,
            variance_fraction[:npcs], U_climatology)
Example #15
def E_Cindex(SSTA, lat, lon):
    """
    E and C indices to define EP&CP
    two orthogonal axes are rotated 45° relative to the principal components of SSTA
    SSTA with (timme,lat,lon)
    """
    #tropical pacific:120E-80W(60-140),20S-20N(35-55)
    SSTA_TP = SSTA[:, (lat <= 30) & (lat >= -30), :]
    SSTA_TP = SSTA_TP[:, :, (lon <= 280) & (lon >= 120)]

    lat1 = lat[(lat <= 30) & (lat >= -30)]
    lon1 = lon[(lon <= 280) & (lon >= 120)]
    #EOF analysis and to get the first 2 pcs
    #coslat=np.cos(np.deg2rad(np.arange(-20,21,2)))
    solver = Eof(SSTA_TP[29:, :, :])
    pcs = solver.pcs(npcs=2, pcscaling=1)
    eof = solver.eofsAsCorrelation(neofs=2)
    a = eof[0, (lat1 <= 5) & (lat1 >= -5), :]
    b = eof[1, (lat1 <= 5) & (lat1 >= -5), :]

    if np.mean(a[:, (lon1 <= 240) & (lon1 >= 190)], (0, 1)) < 0:
        pcs[:, 0] = -pcs[:, 0]

    if np.mean(b[:, (lon1 <= 240) & (lon1 >= 190)], (0, 1)) > 0:
        pcs[:, 1] = -pcs[:, 1]

    # rotate the PC axes by 45 degrees
    C_index = (pcs[:, 0] + pcs[:, 1]) / np.sqrt(2)
    E_index = (pcs[:, 0] - pcs[:, 1]) / np.sqrt(2)

    #find EP&CP years
    # =============================================================================
    #     CI_std=(C_index-np.mean(C_index))/np.std(C_index)
    #     EI_std=(E_index-np.mean(E_index))/np.std(E_index)
    # =============================================================================

    # =============================================================================
    #     cindex=pd.Series(C_index)
    #     eindex=pd.Series(E_index)
    #
    #
    #     #find EP&CP years
    #     CI_std=(cindex-cindex.rolling(window=30).mean())/cindex.rolling(window=30).std()
    #     EI_std=(eindex-eindex.rolling(window=30).mean())/eindex.rolling(window=30).std()
    # =============================================================================

    return C_index, E_index
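# Hedged usage sketch for E_Cindex on synthetic SSTA on the 2-degree grid the
# index comments above imply (values purely illustrative):
import numpy as np
lat = np.arange(-88., 90., 2.)
lon = np.arange(0., 360., 2.)
SSTA = np.random.randn(480, lat.size, lon.size)
C_index, E_index = E_Cindex(SSTA, lat, lon)
print(C_index.shape, E_index.shape)   # (451,) (451,): the first 29 steps are skipped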
Example #16
def eof_computation(var, varunits, lat, lon):
    #----------------------------------------------------------------------------------------
    print(
        '____________________________________________________________________________________________________________________'
    )
    print('Computing the EOFs and PCs')
    #----------------------------------------------------------------------------------------
    # EOF analysis of a data array with spatial dimensions that
    # represent latitude and longitude with weighting. In this example
    # the data array is dimensioned (ntime, nlat, nlon), and in order
    # for the latitude weights to be broadcastable to this shape, an
    # extra length-1 dimension is added to the end:
    weights_array = np.sqrt(np.cos(np.deg2rad(lat)))[:, np.newaxis]

    start = datetime.datetime.now()
    solver = Eof(var, weights=weights_array)
    end = datetime.datetime.now()
    print('EOF computation took %s' % (end - start))

    #ALL VARIANCE FRACTIONS
    varfrac = solver.varianceFraction()
    acc = np.cumsum(varfrac * 100)

    #------------------------------------------PCs unscaled  (case 0 of scaling)
    pcs_unscal0 = solver.pcs()
    #------------------------------------------EOFs unscaled  (case 0 of scaling)
    eofs_unscal0 = solver.eofs()

    #------------------------------------------PCs scaled  (case 1 of scaling)
    pcs_scal1 = solver.pcs(pcscaling=1)

    #------------------------------------------EOFs scaled (case 2 of scaling)
    eofs_scal2 = solver.eofs(eofscaling=2)

    return solver, pcs_scal1, eofs_scal2, pcs_unscal0, eofs_unscal0, varfrac
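# Hedged usage sketch for eof_computation on synthetic (time, lat, lon)
# anomalies; varunits is only carried through for labelling.
import numpy as np
lat = np.linspace(-90., 90., 37)
lon = np.arange(0., 360., 2.5)
var = np.random.randn(120, lat.size, lon.size)
(solver, pcs_scal1, eofs_scal2,
 pcs_unscal0, eofs_unscal0, varfrac) = eof_computation(var, 'm', lat, lon)
print(varfrac[:3], varfrac.sum())   # leading variance fractions; total ~1.0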
def calculate_IOBM(data, lats, lons, times, t_units, calendar):
    """ Calculate the Indian Ocean basin mode as the first EOF over the region 
    20S-20N, 40E-110E.
    See Yang et al (2007) doi:10.1029/2006GL028571"""
    data[np.abs(data) > 1e3] = np.nan
    annual_cycle_removed = remove_annual_cycle(data, times, t_units, calendar)
    lat_min, lat_max = -20, 20
    lon_min, lon_max = 40, 110
    lat_mask = (lats >= lat_min) & (lats <= lat_max)
    lon_mask = (lons >= lon_min) & (lons <= lon_max)
    IO_SST = annual_cycle_removed[:, lat_mask, :][:, :, lon_mask]
    coslat = np.cos(np.deg2rad(lats[lat_mask]))
    wgts = np.sqrt(coslat)[..., np.newaxis]
    solver = Eof(IO_SST, weights=wgts)
    IOBM = solver.pcs(npcs=1, pcscaling=1).flatten()
    EOF1 = solver.eofs(neofs=1)[0, :, :]
    if np.nanmean(EOF1) < 0: IOBM = -IOBM
    IOBM = (IOBM - np.mean(IOBM)) / np.std(IOBM)
    return IOBM
def calculate_IPO(data, lats, lons, times, t_units, calendar):
    """ Calculate the Inter-decadal Pacific Oscillation index 
    Calculated as the first EOF of SST 60S to 60N over the
    Pacific  """
    data[np.abs(data) > 1e3] = np.nan  # set unreasonably high values to NaN
    annual_cycle_removed = remove_annual_cycle(data, times, t_units, calendar)
    lat_min, lat_max = -60, 60
    lon_min, lon_max = 120, 270
    lat_mask = (lats >= lat_min) & (lats <= lat_max)
    lon_mask = (lons >= lon_min) & (lons <= lon_max)
    Pacific_SST = annual_cycle_removed[:, lat_mask, :][:, :, lon_mask]
    coslat = np.cos(np.deg2rad(lats[lat_mask]))
    wgts = np.sqrt(coslat)[..., np.newaxis]
    solver = Eof(Pacific_SST, weights=wgts)
    IPO = solver.pcs(npcs=1, pcscaling=1).flatten()
    EOF1 = solver.eofs(neofs=1)[0, :, :]
    if np.nanmean(EOF1) < 0: IPO = -IPO
    IPO = (IPO - np.mean(IPO)) / np.std(IPO)
    return IPO
Example #19
def eofs_as(dat):
    A = climatologia_xarray(dat['curl']).values
    global land
    EC, WC, land = get_coasts(dat.lat, dat.lon)

    # build a land mask replicated over every time step, then mask the data once
    msk = np.empty(np.shape(A))
    for i in range(0, len(A[:,0,0])):
        msk[i,:,:] = land
    B = np.ma.array(A, mask=msk)
    from get_eddof import get_eddof
    edof = np.empty([len(dat.lat), len(dat.lon)])
    for i in range(0, len(dat.lat)):
        for j in range(0, len(dat.lon)):
            if not msk[0,i,j]:
                edof[i,j] = get_eddof(B[:,i,j])
            else:
                edof[i,j] = np.nan

    dof = int(np.nanmean(edof))
    coslat = np.cos(np.deg2rad(dat.lat.values)).clip(0., 1.)
    wgts = np.sqrt(coslat)[..., np.newaxis]
    solver = Eof(B, center=True, weights=wgts, ddof=dof)

    eof = solver.eofs(neofs=10, eofscaling=2)
    pc = solver.pcs(npcs=10, pcscaling=1)
    varfrac = solver.varianceFraction()
    eigvals = solver.eigenvalues()

    x, y = np.meshgrid(dat.lon, dat.lat)

    return eof, pc, varfrac, x, y, edof
def calculate_PDO(data, lats, lons, times, t_units, calendar):
    """ Calculate the Pacific Decadal Oscillation index as the first PC of SST
    between 20N and 70N
    See Newman et al (2016) doi:10.1175/JCLI-D-15-0508.1"""
    data[np.abs(data) > 1e3] = np.nan  # set unreasonably high values to NaN
    global_mean_removed = data - global_mean(data, lats).reshape(
        times.shape[0], 1, 1)
    annual_cycle_removed = remove_annual_cycle(global_mean_removed, times,
                                               t_units, calendar)
    lat_min, lat_max = 20, 70
    lon_min, lon_max = 120, 270
    lat_mask = (lats >= lat_min) & (lats <= lat_max)
    lon_mask = (lons >= lon_min) & (lons <= lon_max)
    N_Pacific_SST = annual_cycle_removed[:, lat_mask, :][:, :, lon_mask]
    coslat = np.cos(np.deg2rad(lats[lat_mask]))
    wgts = np.sqrt(coslat)[..., np.newaxis]
    solver = Eof(N_Pacific_SST, weights=wgts)
    EOF1 = solver.eofs(neofs=1)[0, :, :]
    PDO = solver.pcs(npcs=1, pcscaling=1).flatten()
    if np.nanmean(EOF1[:, lons[lon_mask] > 210]) < 0: PDO = -PDO
    PDO = (PDO - np.mean(PDO)) / np.std(PDO)
    return PDO
def calc_HadISST_monthly_residual_EOFs(histo_sy, histo_ey, ref_start, ref_end, run_n, n_eofs=22):
    # load the already calculated residuals
    resid_fname = get_HadISST_monthly_residuals_fname(histo_sy, histo_ey, run_n)
    # note that we don't have to subtract the annual cycle any more as the
    # residuals are with respect to a smoothed version of the monthly ssts
    
    resid_mon_fh = netcdf_file(resid_fname, 'r')
    sst_var = resid_mon_fh.variables["sst"]
    lats_var = resid_mon_fh.variables["latitude"]
    lons_var = resid_mon_fh.variables["longitude"]
    attrs = sst_var._attributes
    mv = attrs["_FillValue"]
    ssts = numpy.array(sst_var[:])
    sst_resids = numpy.ma.masked_less(ssts, -1000)
    
    # calculate the EOFs and PCs
    # take the eofs
    coslat = numpy.cos(numpy.deg2rad(lats_var[:])).clip(0., 1.)
    wgts = numpy.sqrt(coslat)[..., numpy.newaxis]
    eof_solver = Eof(sst_resids, center=True, weights=wgts)
    pcs = eof_solver.pcs(npcs=n_eofs)
    eofs = eof_solver.eofs(neofs=n_eofs)
    varfrac = eof_solver.varianceFraction(neigs=n_eofs)
    evs = eof_solver.eigenvalues(neigs=n_eofs)
    evs = evs.reshape([1,evs.shape[0]])
    print(evs.shape)
    
    # get the output names
    out_eofs_fname = get_HadISST_monthly_residual_EOFs_fname(histo_sy, histo_ey, run_n)
    out_pcs_fname  = get_HadISST_monthly_residual_PCs_fname(histo_sy, histo_ey, run_n)
    out_evs_fname  = get_HadISST_monthly_residual_EVs_fname(histo_sy, histo_ey, run_n)

    # save the eofs and pcs
    save_3d_file(out_eofs_fname, eofs, attrs, lats_var, lons_var)
    out_pcs = pcs.reshape([pcs.shape[0],1,pcs.shape[1]])
    save_pcs(out_pcs_fname, out_pcs, attrs)
    save_eigenvalues(out_evs_fname, evs, attrs)
    resid_mon_fh.close()
Example #22
    def PCA(self, field_name):

        start_interv = self.start_pca
        end_interv = self.end_pca
        observationPeriod = 'data_' + str(start_interv) + '_to_' + str(end_interv)
        modelData = np.load(self.directory_data + '/' + field_name + '_' + observationPeriod + '.npy')

        # Velocity is a 3D vector and needs to be reshaped before the PCA
        if 'Velocity' in field_name:
            modelData = np.reshape(modelData, (modelData.shape[0], modelData.shape[1] * modelData.shape[2]), order='F')

        # Standardise the data with mean 0
        meanData = np.nanmean(modelData, 0)
        stdData = np.nanstd(modelData)
        modelDataScaled = (modelData - meanData) / stdData

        #PCA solver
        solver = Eof(modelDataScaled)

        # Principal Components time-series
        pcs = solver.pcs()
        # Projection
        eof = solver.eofs()
        # Cumulative variance
        varianceCumulative = np.cumsum(solver.varianceFraction())

        np.save(self.directory_data + '/' + 'pcs_' + field_name + '_' + observationPeriod,
                pcs)
        np.save(self.directory_data + '/' + 'eofs_' + field_name + '_' + observationPeriod,
                eof)
        np.save(self.directory_data + '/' + 'varCumulative_' + field_name + '_' + observationPeriod,
                varianceCumulative)
        np.save(self.directory_data + '/' + 'mean_' + field_name + '_' + observationPeriod,
                meanData)
        np.save(self.directory_data + '/' + 'std_' + field_name + '_' + observationPeriod,
                stdData)
Example #23
    # only keep region of interest?
    lato = lat
    lono = lon
    lat = lat[np.logical_and(lat >= latlims[0], lat <= latlims[1])]
    lon = lon[np.logical_and(lon >= lonlims[0], lon <= lonlims[1])]

    fldca1 = fldca1.reshape((oshape[0], len(lat), len(lon)))  # fldcash[0]/oshape[0])) # keep time dim
    fldca = fldca1
    fldpa1 = fldpa1.reshape((oshape[0], len(lat), len(lon)))
    fldpa = fldpa1

coslat = np.cos(np.deg2rad(lat)).clip(0.0, 1.0)  # square root of cos(lat) is used so each grid cell's covariance contribution is weighted by its area (proportional to cos lat)
wgts = np.sqrt(coslat)[..., np.newaxis]


solverc = Eof(fldca, weights=wgts)
if docorr:
    eof1c = solverc.eofsAsCorrelation(neofs=enum)
    eof1c = eof1c[enum - 1, ...]
else:
    eof1c = solverc.eofsAsCovariance(neofs=enum)
    eof1c = eof1c[enum - 1, ...]

eof1c = eof1c.squeeze()
eigsc = solverc.eigenvalues()
vexpc = eigsc[enum - 1] / eigsc.sum() * 100  # percent variance explained

fig, axs = plt.subplots(1, 4)
fig.set_size_inches(12, 5)
ax = axs[0]
cplt.kemmap(eof1c, lat, lon, type=type, title=sim + " control EOF" + str(enum), axis=ax, cmin=cmin, cmax=cmax)
Example #24
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
import cartopy.mpl.ticker as cticker
import cartopy.io.shapereader as shpreader
import xarray as xr
from eofs.standard import Eof
import numpy as np
f = xr.open_dataset('../data/pre.nc')
pre = np.array(f['pre'])
lat = f['lat']
lon = f['lon']
pre_lon = lon
pre_lat = lat
lat = np.array(lat)
coslat = np.cos(np.deg2rad(lat))
wgts = np.sqrt(coslat)[..., np.newaxis]
solver = Eof(pre, weights=wgts)
eof = solver.eofsAsCorrelation(neofs=3)
pc = solver.pcs(npcs=3, pcscaling=1)
var = solver.varianceFraction()
color1 = []
color2 = []
color3 = []
for i in range(1961, 2017):
    if pc[i - 1961, 0] >= 0:
        color1.append('red')
    elif pc[i - 1961, 0] < 0:
        color1.append('blue')
    if pc[i - 1961, 1] >= 0:
        color2.append('red')
    elif pc[i - 1961, 1] < 0:
        color2.append('blue')
Example #25
    def EOF(self, metodo='correlacion', modo=1):
        window_eof = Toplevel(root, bg='brown')
        window_eof.geometry('250x400')
        window_eof.wm_iconbitmap('Globe.ico')
        nombre1 = StringVar(window_eof, 'm')
        nombre2 = StringVar(window_eof, 'estandarizada')
        nombre3 = IntVar(window_eof, 1)
        nombre4 = StringVar(window_eof, 'correlacion')
        label1 = Label(window_eof, text="Método de anomalía:",
                       width=15).place(x=10, y=100)
        label2 = Label(window_eof, text="Tipo de anomalía:",
                       width=15).place(x=10, y=150)
        label3 = Label(window_eof, text="Modo:", width=15).place(x=10, y=200)
        label4 = Label(window_eof, text="Método de la EOF:",
                       width=15).place(x=10, y=250)
        entry_box1 = Entry(window_eof, textvariable=nombre1,
                           width=15).place(x=140, y=100)
        entry_box2 = Entry(window_eof, textvariable=nombre2,
                           width=15).place(x=140, y=150)
        entry_box3 = Entry(window_eof, textvariable=nombre3,
                           width=15).place(x=140, y=200)
        entry_box3 = Entry(window_eof, textvariable=nombre4,
                           width=15).place(x=140, y=250)
        self.anomalias(metodo=nombre1.get(), anom=nombre2.get())
        lat = self.latitud[::-1]
        coslat = np.cos(np.deg2rad(lat))
        wgts = np.sqrt(coslat)[..., np.newaxis]
        solver = Eof(
            self.anomalia, weights=wgts
        )  # requires the eofs library: from eofs.standard import Eof
        #eof=np.ones((3,len(self.latitud),len(self.longitud)))
        if nombre4.get() == 'correlacion':
            self.eof = solver.eofsAsCorrelation(neofs=3)
            titulo = 'EOF por Correlacion - Modo ' + str(modo)
            barra = 'Correlacion'
        elif nombre4.get() == 'covarianza':
            self.eof = solver.eofsAsCovariance(neofs=3)
            titulo = 'EOF por Covarianza - Modo ' + str(modo)
            barra = 'Covarianza'

        def desplegar_eof1():
            self.pc = solver.pcs(npcs=nombre3.get(), pcscaling=1)
            plt.figure(figsize=(10, 4))
            plt.plot_date(self.dtime, self.pc, fmt='-')
            plt.grid()
            plt.ylabel('var.units')
            #plt.title('%s at Lon=%.2f, Lat=%.2f' % (vname, loni, lati))
            plt.savefig('pc.png')
            ven_graf_serie = Toplevel(window_eof)
            ven_graf_serie.minsize(400, 400)
            ven_graf_serie.wm_iconbitmap('Globe.ico')
            ven_graf_serie.title("Componente principal modo " +
                                 str(nombre3.get()))
            im = PIL.Image.open("pc.png")
            photo = PIL.ImageTk.PhotoImage(im)
            label = Label(ven_graf_serie, image=photo)
            label.image = photo  # keep a reference!
            label.pack()

        despliegue1 = Button(window_eof,
                             text="Desplegar Anomalia",
                             command=desplegar_eof1).place(x=60, y=300)

        m = bm.Basemap(llcrnrlon = self.longitud[0],llcrnrlat = self.latitud[len(self.latitud)-1],\
            urcrnrlon = self.longitud[len(self.longitud)-1],urcrnrlat = self.latitud[0],projection = 'merc')
        lat = self.latitud
        lon = self.longitud
        lon, lat = np.meshgrid(lon, lat)
        x, y = m(lon, lat)
        print(np.shape(self.eof))

        def desplegar_eof2():
            fig = plt.figure(figsize=(8, 6))
            fig.add_axes([0.05, 0.05, 0.9, 0.85])
            csf = m.contourf(x,
                             y,
                             self.eof[nombre3.get() - 1, :, :].squeeze(),
                             np.arange(-1, 1, 0.1),
                             cmap='jet')
            m.drawcoastlines(linewidth=1.25, color='black')
            m.drawparallels(np.arange(-180, 180, 20),
                            labels=[1, 0, 0, 0],
                            color='gray')
            m.drawmeridians(np.arange(-180, 180, 20),
                            labels=[0, 0, 0, 1],
                            color='gray')
            cbar = m.colorbar(
                csf, location='right',
                size='3%')  # location can be top, left or right
            cbar.set_label(barra)
            plt.title(titulo)
            #plt.show()
            plt.savefig('eof.png')
            ven_graf_serie = Toplevel(window_eof)
            ven_graf_serie.minsize(400, 400)
            ven_graf_serie.wm_iconbitmap('Globe.ico')
            ven_graf_serie.title("Campo espacial de EOF")
            im = PIL.Image.open("eof.png")
            photo = PIL.ImageTk.PhotoImage(im)
            label = Label(ven_graf_serie, image=photo)
            label.image = photo  # keep a reference!
            label.pack()

        despliegue2 = Button(window_eof,
                             text="Campo espacial EOF",
                             command=desplegar_eof2).place(x=60, y=350)
Example #26
timearray = []
#for i in time:
#timearray.append(dt.fromtimestamp(i))
for i in timearray:
    print(i)
gridfile = '/users/asmit101/data/stuff/myngbay_grd.nc'
ncgrid = NetCDFFile(gridfile)
lat = ncgrid.variables['lat_rho']
lon = ncgrid.variables['lon_rho']
print(chlorophyll1.ndim)
print(chlorophyll1.dims)

surfchl = chlorophyll1[:, 14, :, :]
chl_mean = surfchl.mean(axis=0)
anomaly = surfchl - chl_mean
solver = Eof(anomaly)
eof1 = solver.eofsAsCorrelation(neofs=1)
pc1 = solver.pcs(npcs=1, pcscaling=1)
plt.pcolormesh(lon, lat, eof1[0], cmap=plt.cm.RdBu_r)
plt.xlabel('Longitude')
plt.ylabel('Latitude')
plt.title('EOF1 expressed as Correlation')
cbar = plt.colorbar()
cbar.set_label('Correlation Coefficient', rotation=270)
plt.show()
plt.plot(timearray, pc1[:, 0])
plt.xlabel('Year')
plt.ylabel('Normalized Units')
plt.title('PC1 Time Series')
plt.show()
vF1 = solver.varianceFraction(neigs=6)
Example #27
autoencoder = load_model('pwat_ae1000.h5') # 1,000 epochs
#
decoded_imgs = autoencoder.predict(test_data)  # prediction

# raw dataset
#decoded_imgs = test_data  # raw/test data

##### eof
# set up latitude
lat_array = np.asarray([x for x in range(-90, 91)])
cos_lat   = np.cos(np.deg2rad(lat_array))
weighted_lat = np.sqrt(cos_lat)[:,np.newaxis] # give a new axis by np.newaxis

# solver
ntime = decoded_imgs.shape[0]  # number of time samples
solver    = Eof(decoded_imgs.reshape(ntime,lat, lon), weights=weighted_lat)

#get analysis
eof1 = solver.eofsAsCorrelation(neofs=1)
pc1  = solver.pcs(npcs=1, pcscaling=1)
var1 = solver.varianceFraction(neigs=1)
print('Variance 1st mode %2.2f' % var1[0])
varall = solver.varianceFraction()
print('Variance all mode', varall )
print("Sum of all mode's variance  = ", sum(varall))

#plotting
lons  = np.asarray([x for x in range(0,lon)])
lats  = lat_array
clevs = np.linspace(-1,1,11)
fig   = plt.figure(figsize=(12,8))
Example #28
lonnew = np.mod(lon, 360.0) - 180.0
lonq = np.where((lonnew >= -90) & (lonnew <= 40))[0]
lonnao = lonnew[lonq]

z5_diffn = z5_diff[:, :, latq, :]
z5_diffnao = z5_diffn[:, :, :, lonq]

z5n_h = np.nanmean(z500_h[:, 91:, latq, :], axis=0)
z5nao_h = z5n_h[:, :, lonq]

### Calculate NAO
# Create an EOF solver to do the EOF analysis. Square-root of cosine of
# latitude weights are applied before the computation of EOFs.
coslat = np.cos(np.deg2rad(latnao)).clip(0., 1.)
wgts = np.sqrt(coslat)[..., np.newaxis]
solver = Eof(z5nao_h, weights=wgts)

# Retrieve the leading EOF, expressed as the covariance between the leading PC
# time series and the input SLP anomalies at each grid point.
eof1 = solver.eofsAsCovariance(neofs=1).squeeze()
pc1 = solver.pcs(npcs=1, pcscaling=1).squeeze()


### Calculate NAO index
def NAOIndex(anomz5, eofpattern, members):
    """
    Calculate NAO index by regressing Z500 onto the EOF1 pattern
    """
    print('\n>>> Using NAO Index function!')

    if members == True:
Example #29
    return i


filename1 = 'sine_wave_data1.nc'
filename2 = '2D_bulls_eyes.nc'
a = Dataset(filename1, mode='r')
b = Dataset(filename2, mode='r')
dataset1 = xr.open_dataset(xr.backends.NetCDF4DataStore(a))
dataset2 = xr.open_dataset(xr.backends.NetCDF4DataStore(b))
sinData = dataset1['data'].T
sinData = (sinData - sinData.mean(axis=0)) / sinData.std(axis=0)
sinData = sinData.values
bullseyeData = dataset2['data']

#%% EOF analysis
solver = Eof(sinData)
eigenvalues = solver.eigenvalues()  # Get eigenvalues
EOFs = solver.eofs(eofscaling=0)  # Get EOFs
EOFs_reg = solver.eofsAsCorrelation()  # Get EOFs as correlation b/w PCs & orig data
PCs = solver.pcs(pcscaling=1)  # Get PCs

# Get variance explained and # of PCs
VarExplain = np.round(solver.varianceFraction() * 100, 1)
numPCs2Keep = cumSUM(VarExplain, 90)

# Calculate EOFs
EOF1 = EOFs[0, :] * np.sqrt(eigenvalues[0])  # Get EOF 1 & scale it
EOF2 = EOFs[1, :] * np.sqrt(eigenvalues[1])  # Get EOF 2 & scale it
EOF1_reg = EOFs_reg[0, :]
EOF2_reg = EOFs_reg[1, :]
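# Hedged note: multiplying an unscaled EOF by the square root of its
# eigenvalue, as done above, is what the eofs library exposes as eofscaling=2:
EOFs_scaled = solver.eofs(eofscaling=2)   # mode-by-mode equals EOFs * sqrt(eigenvalues)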
Example #30

# Read SST anomalies using the netCDF4 module. The file contains
# November-March averages of SST anomaly in the central and northern Pacific.
filename = example_data_path('sst_ndjfm_anom.nc')
ncin = Dataset(filename, 'r')
sst = ncin.variables['sst'][:]
lons = ncin.variables['longitude'][:]
lats = ncin.variables['latitude'][:]
ncin.close()

# Create an EOF solver to do the EOF analysis. Square-root of cosine of
# latitude weights are applied before the computation of EOFs.
coslat = np.cos(np.deg2rad(lats))
wgts = np.sqrt(coslat)[..., np.newaxis]
solver = Eof(sst, weights=wgts)

# Retrieve the leading EOF, expressed as the correlation between the leading
# PC time series and the input SST anomalies at each grid point, and the
# leading PC time series itself.
eof1 = solver.eofsAsCorrelation(neofs=1)
pc1 = solver.pcs(npcs=1, pcscaling=1)

# Plot the leading EOF expressed as correlation in the Pacific domain.
clevs = np.linspace(-1, 1, 11)
ax = plt.axes(projection=ccrs.PlateCarree(central_longitude=190))
fill = ax.contourf(lons, lats, eof1.squeeze(), clevs,
                   transform=ccrs.PlateCarree(), cmap=plt.cm.RdBu_r)
ax.add_feature(cfeature.LAND, facecolor='w', edgecolor='k')
cb = plt.colorbar(fill, orientation='horizontal')
cb.set_label('correlation coefficient', fontsize=12)
Example #31
surf = surf.data * mask  # surf in Pacific, 0 elsewhere
weights = surf / np.sum(surf)  # normalization of weights

# **Since EOFs are based on covariance, the square root of the weights must be used.**

weights = np.sqrt(weights)

# ## Computation of EOFS (standard mode)
#
# The EOFS can now be computed. First, an EOF solver must be initialized. **The `time` dimension must always be the first one when using numpy.array as inputs.**

# +
import eofs
from eofs.standard import Eof

solver = Eof(anoms_detrend, weights=weights)
# -

# Now, EOF components can be extracted. First, the covariance maps are extracted.

# +
neofs = 2
nlat, nlon = surf.shape
covmaps = solver.eofsAsCovariance(neofs=neofs)
print(type(covmaps))

plt.figure()
plt.subplot(211)
cs = plt.imshow(covmaps[0], cmap=plt.cm.RdBu_r)
cs.set_clim(-1, 1)
cb = plt.colorbar(cs)
Example #32
# European/Atlantic domain (80W-40E, 20-90N).
filename = example_data_path('hgt_djf.nc')
ncin = Dataset(filename, 'r')
z_djf = ncin.variables['z'][:]
lons = ncin.variables['longitude'][:]
lats = ncin.variables['latitude'][:]
ncin.close()

# Compute anomalies by removing the time-mean.
z_djf_mean = z_djf.mean(axis=0)
z_djf = z_djf - z_djf_mean

# Create an EOF solver to do the EOF analysis. Square-root of cosine of
# latitude weights are applied before the computation of EOFs.
coslat = np.cos(np.deg2rad(lats)).clip(0., 1.)
wgts = np.sqrt(coslat)[..., np.newaxis]
solver = Eof(z_djf, weights=wgts)

# Retrieve the leading EOF, expressed as the covariance between the leading PC
# time series and the input SLP anomalies at each grid point.
eof1 = solver.eofsAsCovariance(neofs=1)

# Plot the leading EOF expressed as covariance in the European/Atlantic domain.
m = Basemap(projection='ortho', lat_0=60., lon_0=-20.)
x, y = m(*np.meshgrid(lons, lats))
m.contourf(x, y, eof1.squeeze(), cmap=plt.cm.RdBu_r)
m.drawcoastlines()
plt.title('EOF1 expressed as covariance', fontsize=16)

plt.show()
def calcSeasonalEOF(anomslp,years,year1,year2,monthind,eoftype,pctype):
    """
    Calculates EOF over defined seasonal period
    
    Parameters
    ----------
    anomslp : 4d array [year,month,lat,lon]
        sea level pressure anomalies
    years : 1d array
        years in total
    year1 : integer
        min month
    year2 : integer
        max month
    monthind : 1d array
        indices for months to be calculated in seasonal mean
    eoftype : integer
        1,2
    pctype : integer
        1,2
    
    Returns
    -------
    eof : array
        empirical orthogonal function
    pc : array
        principal components
    """
    print('\n>>> Using calcSeasonalEOF function!')

    ### Slice years
    if np.isfinite(year1):
        if np.isfinite(year2):
            yearqq = np.where((years >= year1) & (years <= year2))
            anomslp = anomslp[yearqq,:,:,:].squeeze()
        else:
            print('Using entire time series for this EOF!')
    else:
        print('Using entire time series for this EOF!')
    print('Sliced time period for seasonal mean!')
    
    ### Average over months
#    anomslp = anomslp[:,monthind,:,:]
#    anomslp = np.nanmean(anomslp[:,:,:,:],axis=1)
    
    print('Sliced month period for seasonal mean!')
    
    anomslpq = anomslp
    
    pc = np.empty((anomslpq.shape[0],2,anomslpq.shape[1]))
    for i in range(anomslp.shape[1]):
        
        anomslp = anomslpq[:,i,:,:]

        ### Calculate EOF
        # Create an EOF solver to do the EOF analysis. Square-root of cosine of
        # latitude weights are applied before the computation of EOFs.
        coslat = np.cos(np.deg2rad(lats)).clip(0., 1.)
        wgts = np.sqrt(coslat)[..., np.newaxis]
        solver = Eof(anomslp, weights=wgts)
        
        # Retrieve the leading EOF, expressed as the covariance between the 
        # leading PC time series and the input SLP anomalies at each grid point.
        eof = solver.eofsAsCovariance(neofs=eoftype)
        pc[:,:,i] = solver.pcs(npcs=pctype, pcscaling=1)
        
        print('EOF and PC computed!')

    print('*Completed: EOF and PC Calculated!\n')
    
    return eof,pc
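# Hedged usage sketch for calcSeasonalEOF; note the function reads `lats` from
# the enclosing scope, so a 1-D latitude array must exist before the call.
# Passing NaN for year1/year2 keeps the whole period (all arrays synthetic).
import numpy as np
lats = np.linspace(20., 90., 36)
lons = np.arange(0., 360., 2.5)
years = np.arange(1979, 2019)
anomslp = np.random.randn(years.size, 12, lats.size, lons.size)
eof, pc = calcSeasonalEOF(anomslp, years, np.nan, np.nan,
                          np.arange(0, 3), 1, 2)
print(eof.shape, pc.shape)   # (1, 36, 144) (40, 2, 12)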
Example #34
#		print "\n Subset of 200..."
#		np.random.shuffle(filenames)
#		filenames=filenames[:200]


		data=load_regional(filenames,ny,nx)
		data=np.ma.masked_values(data,2.e+20)
		print "data loaded",data.shape
	
		# Set up info
		plt.set_cmap('RdBu')
		neofs=5
		nens=data.shape[0]
		nwanted=57
	
		solver=Eof(data)
		print('set up EOF solver')
		pcs=solver.pcs(npcs=neofs,pcscaling=1)
		eofs=solver.eofs(neofs=neofs)
		varfrac=solver.varianceFraction(neigs=neofs)
		print('calculated EOFs')
		print('printing EOFs')
		for i in range(neofs):
			print('EOF',i)
			plt.clf()
			plot_region_pnw(eofs[i,:],lat_coord,lon_coord,0,-1,0,-1,'EOF'+str(i),varfrac[i])
		print("plotting histograms of PCs")
		for i in range(3):
			plt.clf()
			plt.hist(pcs[:,i],200,range=(-4,4),density=True,alpha=0.4,label='pc'+str(i))
			plt.ylim([0,.6])
SAM_s4_ninia = np.zeros([5, index_ninio_s4_lower.values.sum()])
SAM_s4_sninia = np.zeros([5, index_sninio_s4_lower.values.sum()])
eof_erai = np.zeros([5, hgt_erai.latitude.values.shape[0]])
eof_s4 = np.zeros([5, hgt_s4.latitude.values.shape[0]])
sign_s4 = np.array([1, 1, -1, -1, -1])
sign_erai = np.array([1, -1, -1, -1, -1])
for i in np.arange(0, 5):
    aux = hgt_erai['z'].resample(time='QS-' + lmonth[i]).mean(dim='time',
                                                              skipna=True)
    mes = datetime.datetime.strptime(lmonth[i], '%b').month
    aux = aux.sel(time=np.logical_and(aux['time.month'] == mes,
                                      aux['time.year'] != 2002))
    aux = signal.detrend(aux, axis=0, type='linear')
    X_zm = np.mean(aux, axis=2)
    X_an = X_zm - np.mean(X_zm, axis=0)
    solver = Eof(X_an)
    pcs = solver.pcs(npcs=1, pcscaling=1)
    eof_erai[i, :] = solver.eofs(neofs=1)[0, :]
    SAM_erai[i, :] = sign_erai[i] * pcs[:, 0]
    SAM_erai_ninio[i, :] = sign_erai[i] * pcs[index_ninio_erai_upper, 0]
    SAM_erai_ninia[i, :] = sign_erai[i] * pcs[index_ninio_erai_lower, 0]
    SAM_erai_sninio[i, :] = sign_erai[i] * pcs[index_sninio_erai_upper, 0]
    SAM_erai_sninia[i, :] = sign_erai[i] * pcs[index_sninio_erai_lower, 0]
    hgt_s4_smean = np.nanmean(hgt_s4.z.values[i:i + 3, :, :, :], axis=0)
    hgt_s4_smean = signal.detrend(hgt_s4_smean, axis=0, type='linear')
    hgt_s4_smean = np.nanmean(hgt_s4_smean, axis=2)
    hgt_s4_smean = hgt_s4_smean - np.nanmean(hgt_s4_smean, axis=0)
    # Create an EOF solver to do the EOF analysis. Square-root of cosine of
    # latitude weights are applied before the computation of EOFs.
    solver = Eof(hgt_s4_smean)  #, weights=wgts)
    pcs = solver.pcs(npcs=1, pcscaling=1)
Example #36
        print(tfname)
    # Open the file
    cnc = camgoda(cfull_path)
    tnc = camgoda(tfull_path)
    is3d, var, vname = cnc.ExtractData(variable, box)
    is3d, var, vname = tnc.ExtractData(variable, box)
    if n == 0:
        nlats, nlons = cnc.data.shape
        boxlat = cnc.boxlat
        boxlon = cnc.boxlon
        d = np.zeros(shape=(len(dates), nlats * nlons))
    d[n, :] = np.ndarray.flatten(tnc.data - cnc.data)

# Compute the amplitude timeseries and EOF spatial distributions of the data array
print "Computing the EOF..."
EOF = Eof(d, center=removeMeans)
eof = EOF.eofs(neofs=num_eofs)
pca = EOF.pcs(npcs=num_eofs, pcscaling=1)
varfrac = EOF.varianceFraction()
print "Finished!"

# Reshape F into a spatial grid
eof_grid = np.reshape(eof, (eof.shape[0], nlats, nlons))

# Make the maps
bmlon, bmlat = np.meshgrid(boxlon, boxlat)
southern_lat = boxlat[0]
northern_lat = boxlat[-1]
left_lon = boxlon[0]
right_lon = boxlon[-1]
if 0 in boxlon[1:-2]:  # if we cross the gml
Example #37
def main(mFilepath, xFilepath, yFilepath, window, windowFlag=True):
    ## load in the data matrix as a numpy array
    m = np.loadtxt(mFilepath, dtype='float', delimiter=',', skiprows=1)
    # lon = np.loadtxt(xFilepath, dtype='float', delimiter=',', skiprows=1)
    # lat = np.loadtxt(yFilepath, dtype='float', delimiter=',', skiprows=1)
    # time =  np.arange('1958-01-01', '2014-09-22', dtype='datetime64')
    # years = range(1958, 2014)
    ## Create a list of dates spanning the study period
    base = dt.datetime(2014, 9, 21, 1, 1, 1, 1)
    dates = [base - dt.timedelta(days=x) for x in range(0, 20718)]
    date_list = [item for item in reversed(dates)]


    ## attempted to read in the raw data, but was struggling with
    ## the array dimensions
    # ncFiles = os.listdir(workspace)
    # slpList, lonList, latList, timeList = [], [], [], []
    # for fileIn in ncFiles:
    #     ncIn = Dataset(os.path.join(workspace, fileIn), 'r')
    #     slpList.append(ncIn.variables['slp'][:]/100)
    #     lonList.append(ncIn.variables['lon'][:])
    #     latList.append(ncIn.variables['lat'][:])
    #     timeList.append(ncIn.variables['time'][:])
    #     ncIn.close()

    # slp = np.array(slpList)
    # print(slp)
    # print(slp.shape)
    # # print(slp)
    # # print(np.shape(slp))

    ## create an EOF solver object and extract the first
    ## 4 EOFs and their associated PCs. Scaling can be
    ## applied if desired
    ## http://ajdawson.github.io/eofs/api/eofs.standard.html#eofs.standard.Eof
    solver = Eof(m)
    eofs = solver.eofs(neofs=4, eofscaling=0)
    pcs = solver.pcs(npcs=4, pcscaling=0)

    # lon, lat = np.meshgrid(lon, lat)

    ## plot the EOFs as nongeographic data for simplicity
    fig = plt.figure(figsize=(10, 10))
    for i in range(4):
        ax = fig.add_subplot(2, 2, i+1)
        lab = 'EOF' + str(i + 1)
        main =  'Unscaled ' + lab

        eofPlot = eofs[i,].reshape(17, 32)

        plt.imshow(eofPlot, cmap=plt.cm.RdBu_r)
        plt.title(main)
        cb = plt.colorbar(orientation='horizontal', cmap=plt.cm.RdBu_r)
        cb.set_label(lab, fontsize=12)

        ## Basemap failure below.  Something with the y cell size went wrong
        # bm = Basemap(projection='cyl', llcrnrlat=16.17951, urcrnrlat=68.48459,
        #              llcrnrlon=-176.0393, urcrnrlon=-98.07901, resolution='c')

        # # bm.contourf(x, y, eof1.squeeze(), clevs, cmap=plt.cm.RdBu_r)
        # bm.drawcoastlines()
        # bm.drawstates()
        # im = bm.pcolormesh(lon, lat, eofPlot, cmap=plt.cm.RdBu_r, latlon=True)
        # # bm.fillcontinents(color='coral', lake_color='aqua')
        # bm.drawparallels(np.arange(-90.,91.,15.))
        # bm.drawmeridians(np.arange(-180.,181.,30.))
        # # bm.drawmapboundary(fill_color='aqua')
        # cb = plt.colorbar(orientation='horizontal')
        # cb.set_label(lab, fontsize=12)
        # plt.title(main, fontsize=16)
        # plt.show()
    plt.show()
    ## Plot the PCs as a time series
    fig = plt.figure(figsize=(16, 16))
    for i in range(4):
        ylab = 'PC' + str(i+1)
        title = ylab + ' Time Series'

        pcPlot = pcs[:,i]
        if i==0:
            theAx = fig.add_subplot(4, 1, i+1)
            plt.setp(theAx.get_xticklabels(), visible=False)
            theAx.set_xlabel('')
        if i>0 and i<3:
            ax = fig.add_subplot(4, 1, i+1, sharex=theAx)
            plt.setp(ax.get_xticklabels(), visible=False)
        if i==3:
            ax = fig.add_subplot(4, 1, i+1, sharex=theAx)
            plt.xlabel('Date')

        plt.plot(date_list, pcPlot, color='b')
        if windowFlag:
            plt.plot(date_list, movingaverage(pcPlot, window), 
                     color='r', linestyle='-')
        plt.axhline(0, color='k')
        plt.title(title)
        plt.ylabel(ylab)
    plt.show()

    ## Subset the dates to the last year of the dataset
    short_date = [item for item in date_list if 
                  item >= dt.datetime(2013, 6, 17) 
                  and item < dt.datetime(2014, 6, 25)]
    indices = [date_list.index(item) for item in short_date]

    fig = plt.figure(figsize=(16, 16))
    ## Plot out the last year of the PCs to get a more detailed
    ## pattern for comparison to the R results
    for i in range(4):
        ylab = 'PC' + str(i+1)
        title = ylab + ' Time Series (1 year)'

        pcPlot = pcs[np.array(indices),i]
        if i==0:
            theAx = fig.add_subplot(4, 1, i+1)
            plt.setp(theAx.get_xticklabels(), visible=False)
            theAx.set_xlabel('')
        if i>0 and i<3:
            ax = fig.add_subplot(4, 1, i+1, sharex=theAx)
            plt.setp(ax.get_xticklabels(), visible=False)
        if i==3:
            ax = fig.add_subplot(4, 1, i+1, sharex=theAx)
            plt.xlabel('Date')

        plt.plot(short_date, pcPlot, color='b')
        
        if windowFlag:
            plt.plot(short_date, 
                     movingaverage(pcPlot, window), color='r')

        plt.axhline(0, color='k')
        plt.title(title)
        plt.ylabel(ylab)
    plt.show()

    ## Subset the dates to the last decade of the dataset
    decade = [item for item in date_list if 
              item >= dt.datetime(2004, 6, 17) 
                  and item < dt.datetime(2014, 6, 17)]
    decadeIndices = [date_list.index(item) for item in decade]

    fig = plt.figure(figsize=(16, 16))
    ## Plot out the last decade of the PCs to get a more detailed
    ## pattern for comparison to the R results
    for i in range(4):
        ylab = 'PC' + str(i+1)
        title = ylab + ' Time Series (1 decade)'

        pcPlot = pcs[np.array(decadeIndices),i]
        if i==0:
            theAx = fig.add_subplot(4, 1, i+1)
            plt.setp(theAx.get_xticklabels(), visible=False)
            theAx.set_xlabel('')
        if i>0 and i<3:
            ax = fig.add_subplot(4, 1, i+1, sharex=theAx)
            plt.setp(ax.get_xticklabels(), visible=False)
        if i==3:
            ax = fig.add_subplot(4, 1, i+1, sharex=theAx)
            plt.xlabel('Date')

        plt.plot(decade, pcPlot, color='b')
        if windowFlag:
            plt.plot(decade, 
                     movingaverage(pcPlot, window), color='r')
        plt.axhline(0, color='k')
        plt.title(title)
        plt.ylabel(ylab)
    plt.show()
Example #38
def EP_CPindex(SSTA, lat, lon):
    """
    EP_CPindex method to define CP&EP
    FOR CP: regression of Nino1+2 SSTA associated with eastern warming is removed
    FOR EP: regression of Nino 4 SSTA associated with cetral warming is removed
    both EOF to find 1st PC time series (exceed on standard deviation-1 sigma)
    
    SSTA with (time,lat,lon) with masked values and nan
    """
    #Nino1+2:90W-80W(135-140),0-10S(45-50)
    ssta12 = SSTA[:, (lat <= 10) & (lat >= 0), :]
    Nino12 = np.ma.average(ssta12[:, :, (lon <= 280) & (lon >= 270)], (1, 2))
    Nino12 = np.ma.getdata(Nino12)

    #Nino4:160E-150W(80-105),5N-5S(43-47)
    ssta4 = SSTA[:, (lat <= 5) & (lat >= -5), :]
    Nino4 = np.ma.average(ssta4[:, :, (lon <= 210) & (lon >= 160)], (1, 2))
    Nino4 = np.ma.getdata(Nino4)

    #tropical pacific:120E-80W(60-140),20S-20N(35-55)
    SSTA_TP = SSTA[:, (lat <= 30) & (lat >= -30), :]
    SSTA_TP = SSTA_TP[:, :, (lon <= 280) & (lon >= 120)]

    lat1 = lat[(lat <= 30) & (lat >= -30)]
    lon1 = lon[(lon <= 280) & (lon >= 120)]

    SSTA_TP12 = np.zeros(SSTA_TP.shape)
    SSTA_TP4 = np.zeros(SSTA_TP.shape)

    for i in range(0, SSTA_TP.shape[1]):
        for j in range(0, SSTA_TP.shape[2]):
            k12, _, _, _, _ = stats.linregress(Nino12, SSTA_TP[:, i, j])
            SSTA_TP12[:, i, j] = SSTA_TP[:, i, j] - k12 * Nino12
            k4, _, _, _, _ = stats.linregress(Nino4, SSTA_TP[:, i, j])
            SSTA_TP4[:, i, j] = SSTA_TP[:, i, j] - k4 * Nino4

    #EOF analysis
    #coslat=np.cos(np.deg2rad(np.arange(-20,21,2)))
    #wgt=np.sqrt(coslat)[..., np.newaxis]
    solver12 = Eof(SSTA_TP12)
    eof12 = solver12.eofsAsCorrelation(neofs=1)
    PC12 = solver12.pcs(npcs=1, pcscaling=1)
    PC12 = PC12[:, 0]
    a = eof12[:, (lat1 <= 5) & (lat1 >= -5), :]
    if np.mean(a[:, :, (lon1 <= 240) & (lon1 >= 190)].squeeze(), (0, 1)) < 0:
        PC12 = -PC12

    solver4 = Eof(SSTA_TP4)
    eof4 = solver4.eofsAsCorrelation(neofs=1)
    PC4 = solver4.pcs(npcs=1, pcscaling=1)
    PC4 = PC4[:, 0]
    b = eof4[:, (lat1 <= 5) & (lat1 >= -5), :]

    if np.mean(b[:, :, (lon1 <= 240) & (lon1 >= 190)].squeeze(), (0, 1)) < 0:
        PC4 = -PC4

    #PC12 is for cp definition and PC4 is for EP

    #standardized


# =============================================================================
#     pc12_std=(PC12-np.mean(PC12))/np.std(PC12)
#     pc4_std=(PC4-np.mean(PC4))/np.std(PC4)
# =============================================================================
# =============================================================================
#     pc12=pd.Series(PC12[:,0])
#     pc4=pd.Series(PC4[:,0])
#     pc12_std=(pc12-pc12.rolling(window=30).mean())/pc12.rolling(window=30).std()
#     pc4_std=(pc4-pc4.rolling(window=30).mean())/pc4.rolling(window=30).std()
# =============================================================================

    return PC12, PC4  #CP, EP
Example #39
def calculate_correlations_and_pvalues(var_pairs, label_to_vname_to_season_to_yearlydata: dict, season_to_months: dict,
                                       region_of_interest_mask, lakes_mask=None, lats=None) -> dict:
    """

    :param var_pairs:
    :param label_to_vname_to_season_to_yearlydata:
    :param lats needed for weighting of eof solver
    :return: {(vname1, vname2): {label: {season: [corr, pvalue]}}}}
    """
    res = {}
    for pair in var_pairs:
        pair = tuple(pair)

        res[pair] = {}

        for label in label_to_vname_to_season_to_yearlydata:

            res[pair][label] = {}
            for season in season_to_months:

                years_sorted = sorted(label_to_vname_to_season_to_yearlydata[label][pair[0]][season])

                v1_dict, v2_dict = [label_to_vname_to_season_to_yearlydata[label][pair[vi]][season] for vi in range(2)]
                v1 = np.array([v1_dict[y] for y in years_sorted])
                v2 = np.array([v2_dict[y] for y in years_sorted])

                r = np.zeros(v1.shape[1:]).flatten()
                p = np.ones_like(r).flatten()

                v1 = v1.reshape((v1.shape[0], -1))
                v2 = v2.reshape((v2.shape[0], -1))

                # for hles and ice fraction get the eof of the ice and correlate
                if pair == ("hles_snow", "lake_ice_fraction"):
                    # assume that v2 is the lake_ice_fraction
                    v_lake_ice = v2

                    positions_hles_region = np.where(region_of_interest_mask.flatten())[0]
                    positions_lakes = np.where(lakes_mask.flatten())[0]

                    v_lake_ice = v_lake_ice[:, positions_lakes]
                    # calculate anomalies
                    v_lake_ice = v_lake_ice - v_lake_ice.mean(axis=0)

                    weights = np.cos(np.deg2rad(lats.flatten()[positions_lakes])) ** 0.5

                    solver = Eof(v_lake_ice, weights=weights[..., np.newaxis])
                    print(label, solver.varianceFraction(neigs=10))


                    # use the module of the PC1 to make sure it has physical meaning
                    pc1_ice = solver.pcs(npcs=1)[:, 0]

                    # debug: plot eof
                    eofs = solver.eofs(neofs=1)

                    eof_2d = np.zeros_like(lats).flatten()
                    eof_2d[positions_lakes] = eofs[:, 0] * pc1_ice
                    eof_2d = eof_2d.reshape(lats.shape)

                    plt.figure()
                    im = plt.pcolormesh(eof_2d.T)
                    plt.colorbar(im)
                    plt.show()

                    if True:
                        raise Exception


                    # print(positions)
                    for i in positions_hles_region:
                        r[i], p[i] = pearsonr(v1[:, i], pc1_ice)

                else:

                    positions = np.where(region_of_interest_mask.flatten())

                    # print(positions)
                    for i in positions[0]:
                        r[i], p[i] = pearsonr(v1[:, i], v2[:, i])

                r.shape = region_of_interest_mask.shape
                p.shape = region_of_interest_mask.shape

                r = np.ma.masked_where(~region_of_interest_mask, r)
                p = np.ma.masked_where(~region_of_interest_mask, p)

                res[pair][label][season] = [r, p]

    return res
Example #40
filename = example_data_path('hgt_djf.nc')
ncin = Dataset(filename, 'r')
z_djf = ncin.variables['z'][:]
lons = ncin.variables['longitude'][:]
lats = ncin.variables['latitude'][:]
ncin.close()

# Compute anomalies by removing the time-mean.
z_djf_mean = z_djf.mean(axis=0)
z_djf = z_djf - z_djf_mean

# Create an EOF solver to do the EOF analysis. Square-root of cosine of
# latitude weights are applied before the computation of EOFs.
coslat = np.cos(np.deg2rad(lats)).clip(0., 1.)
wgts = np.sqrt(coslat)[..., np.newaxis]
solver = Eof(z_djf, weights=wgts)

# Retrieve the leading EOF, expressed as the covariance between the leading PC
# time series and the input SLP anomalies at each grid point.
eof1 = solver.eofsAsCovariance(neofs=1)

# Plot the leading EOF expressed as covariance in the European/Atlantic domain.
clevs = np.linspace(-75, 75, 11)
proj = ccrs.Orthographic(central_longitude=-20, central_latitude=60)
ax = plt.axes(projection=proj)
ax.set_global()
ax.coastlines()
ax.contourf(lons, lats, eof1.squeeze(), levels=clevs,
            cmap=plt.cm.RdBu_r, transform=ccrs.PlateCarree())
plt.title('EOF1 expressed as covariance', fontsize=16)
Example #41
season = ['ASO', 'SON', 'OND', 'NDJ', 'DJF']
lmonth = ['Aug', 'Sep', 'Oct', 'Nov', 'Dec']
# loop over seasons, select data and perform boxplot
SAM_erai = np.zeros([5, 36])
SAM_s4 = np.zeros([5, len(hgt_s4.realiz.values)])
eof_erai = np.zeros([5, len(hgt_erai.latitude.values)])
eof_s4 = np.zeros([5, len(hgt_s4.latitude.values)])
sign_s4 = np.array([1, 1, 1, 1, -1])
sign_erai = np.array([1, -1, -1, -1, 1])
for i in np.arange(0, 5):
	aux = hgt_erai['z'].resample(time='QS-' + lmonth[i]).mean(dim='time',skipna=True)
	mes = datetime.datetime.strptime(lmonth[i], '%b').month
	aux = aux.sel(time= np.logical_and(aux['time.month'] == mes, aux['time.year']!=2002))
	X_zm = aux.mean(dim='longitude')
	X_an = X_zm - X_zm.mean(dim='time')
	solver = Eof(X_an.values)
	pcs = solver.pcs(npcs=1, pcscaling=1)
	eof_erai[i, :] = solver.eofs(neofs=1)[0,:]
	SAM_erai[i, :] = sign_erai[i] * pcs[:, 0]
	hgt_s4_smean = np.nanmean(np.nanmean(hgt_s4.z.values[i:i + 3, :, :, :], axis=0), axis=2)
	hgt_s4_smean = hgt_s4_smean - np.nanmean(hgt_s4_smean, axis=0)
	solver = Eof(hgt_s4_smean)
	pcs = solver.pcs(npcs=1, pcscaling=1)
	eof_s4[i, :] = solver.eofs(neofs=1)[0,:]
	SAM_s4[i, :] = sign_s4[i] * pcs[:, 0]

time = np.concatenate([np.arange(1981,2002), np.arange(2003,2018)])
ds = xr.Dataset({'SAM_index': xr.DataArray(SAM_erai, coords=[('season', season),('year', time)])})

ds.to_netcdf(RUTA + 'fogt/SAM_index_erai.nc4')