import numpy as np

# NOTE: this function relies on module-level helpers (dart, ano, filter, aave)
# that are defined or imported elsewhere in this module.


def correlations_lag_lat_or_lon(E, maxlag, lat_or_lon='lon', filter_order=50,
                                climatology_option='NODA', hostname='taurus', verbose=False):
    """
    Compute correlations between U850 or OLR in a reference area and everywhere else,
    as a function of lag and either latitude or longitude.

    INPUTS:
    E: a standard DART experiment dictionary, with the variable field and level range
        corresponding to some MJO variable
    maxlag: the limit of the lag (in days) that we look at
    lat_or_lon: choose which dimension to preserve after averaging -- 'lat' or 'lon'
    climatology_option: choose which climatology to take the anomalies with respect to -- default is 'NODA'
    """

    # change the given daterange to daily resolution, because the lag is specified in days
    E['daterange'] = dart.change_daterange_to_daily(E['daterange'])

    # compute or load the daily climatology and the deviation from it
    anomalies, climatology, lat, lon, lev, DRnew = ano(E, climatology_option=climatology_option,
                                                       hostname=hostname, verbose=verbose)

    # filter the daily anomalies using a Lanczos filter
    AA, FA = filter(anomalies, filter_order, return_as_vector=False)

    if E['variable'] == 'U':
        variable_name = 'U' + str(E['levrange'][0])
    else:
        variable_name = E['variable']

    # compute the zonal or meridional mean of the resulting field.
    # The region we average over depends on whether we want lag-lat or lag-lon plots.
    # Also note that, by construction of the filtered anomalies, the 3rd dimension is always time.
    if lat_or_lon == 'lon':
        # select latitudes 10S-10N and average meridionally, then compute correlations as a function of lon
        lat1, lon1, FAm = aave('TB', FA, lat, lon, None, variable_name, averaging_dimension='lat')
    if lat_or_lon == 'lat':
        # average over the longitude corridor 80-100E and compute correlations as a function of lat
        lat1, lon1, FAm = aave('ZB', FA, lat, lon, None, variable_name, averaging_dimension='lon')

    # area-average the desired variable over the Indian Ocean reference region
    if (E['daterange'][0].month >= 10) or (E['daterange'][0].month <= 2):
        season = 'winter'
    else:
        season = 'summer'
    lat0, lon0, FA0 = aave('IO', FA, lat, lon, season, variable_name, averaging_dimension='all')

    # ------ compute the field of correlation coefficients
    # empty array of size lag by lat (or lon),
    # plus an array to keep track of the sample size
    Lag_range = range(-maxlag, maxlag + 1)
    nlag = len(Lag_range)
    n = FAm.shape[0]
    R = np.zeros(shape=(nlag, n))
    S = np.zeros(shape=(nlag, n))

    # loop over latitudes (or longitudes)
    T = len(FA0)
    for ii in range(n):
        # loop over lags
        for ilag, L in zip(range(nlag), Lag_range):
            # the time points that we can compare go from L to T-L,
            # so shorter lags have a larger sample size and are more significant
            if L < 0:
                Tsel = range(-L, T)
            if L > 0:
                Tsel = range(0, T - L)
            if L == 0:
                Tsel = range(0, T)

            # loop over the available time points and gather the values to compare
            IO = []
            X = []
            for k in Tsel:
                IO.append(FA0[k + L])
                X.append(FAm[ii, k])

            # compute the correlation from this list of samples and store it in the lag vs. lat (or lon) array;
            # we need at least two samples for a meaningful correlation coefficient
            if len(IO) > 1:
                rho = np.corrcoef(X, IO)
                R[ilag, ii] = rho[1, 0]
                S[ilag, ii] = len(IO)
            else:
                R[ilag, ii] = np.nan
                S[ilag, ii] = np.nan

    if lat_or_lon == 'lon':
        space_dim = lon1
    if lat_or_lon == 'lat':
        space_dim = lat1
    L = np.array(Lag_range)

    return R, S, L, space_dim
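
# Example usage (a minimal, hypothetical sketch: the experiment-dictionary fields shown
# here are assumptions based on the docstring above, not a verified configuration, and a
# real DART experiment dictionary will carry additional entries needed by ano and aave):
#
#     import datetime
#     E = dict(variable='U', levrange=[850, 850],
#              daterange=[datetime.datetime(2009, 10, 1) + datetime.timedelta(days=d)
#                         for d in range(90)])
#     R, S, L, lons = correlations_lag_lat_or_lon(E, maxlag=20, lat_or_lon='lon')
#     # R[ilag, ilon] then holds the correlation between the Indian Ocean reference series
#     # and the 10S-10N meridional mean at lag L[ilag] and longitude lons[ilon];
#     # S[ilag, ilon] is the corresponding sample size.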