scpdsi_lmr_mean[region][:] = np.nan
    pr_lmr_mean[region] = np.zeros((nyears_lmr, niter_lmr))
    pr_lmr_mean[region][:] = np.nan
    tas_lmr_mean[region] = np.zeros((nyears_lmr, niter_lmr))
    tas_lmr_mean[region][:] = np.nan
    #
    print('Averaging over ' + region)
    lat_min = region_bounds[region][0]
    lat_max = region_bounds[region][1]
    lon_min = region_bounds[region][2]
    lon_max = region_bounds[region][3]
    for i in range(niter_lmr):
        print(' === Computing means for iteration ' + str(i + 1) + '/' +
              str(niter_lmr) + ' ===')
        scpdsi_lmr_mean[region][:, i] = compute_regional_means.compute_means(
            scpdsi_lmr_all[:, i, :, :], lat_lmr, lon_lmr, lat_min, lat_max,
            lon_min, lon_max)
        pr_lmr_mean[region][:, i] = compute_regional_means.compute_means(
            pr_lmr_all[:, i, :, :], lat_lmr, lon_lmr, lat_min, lat_max,
            lon_min, lon_max)
        tas_lmr_mean[region][:, i] = compute_regional_means.compute_means(
            tas_lmr_all[:, i, :, :], lat_lmr, lon_lmr, lat_min, lat_max,
            lon_min, lon_max)

# Compute means for the DaiPDSI data set as well.
pdsi_dai_mean = {}
for region in possible_regions:
    #
    print('Averaging over ' + region)
    lat_min = region_bounds[region][0]
    lat_max = region_bounds[region][1]
예제 #2
0
def correlation_maps(dataset,scpdsi_mean,sst_mean,zg_500hPa_mean,lat,lon,years,year_bounds):
    """Map correlations between regional drought and SST / 500 hPa height fields.

    For each region in the module-level ``regions`` list, the regional-mean,
    sign-inverted scPDSI series is correlated against every grid point of
    ``sst_mean`` and ``zg_500hPa_mean``.  The resulting correlation maps are
    plotted (or saved, depending on ``save_instead_of_plot``) and returned.

    Parameters
    ----------
    dataset : str
        Label used in figure titles/filenames (e.g. 'LMR NAonly/NAexclude').
    scpdsi_mean, sst_mean, zg_500hPa_mean : arrays
        Time-first gridded fields; indexed as [year, lat, lon] below.
    lat, lon : 1-D arrays
        Grid coordinates assumed shared by all three fields.
    years : 1-D array
        Year of each time slice in the input fields.
    year_bounds : sequence of two numbers
        Inclusive [first, last] years to analyze.

    Returns
    -------
    dict
        Correlation maps, locations of non-significant points, grid metadata,
        and the regional-mean drought series.

    Notes
    -----
    Also reads the module-level globals ``corr_calc_type``, ``correlation_calc``,
    ``corr_2d_ttest``, ``SET`` and ``remove_points_over_land`` (reads need no
    ``global`` statement).  ``remove_influence_of_nino34`` and
    ``remove_influence_of_pdo`` are declared global but never used here.
    """
    #
    # Import global variables
    global regions,region_bounds,calc_bounds,remove_influence_of_nino34,remove_influence_of_pdo,year_filter,save_instead_of_plot
    #
    # Set the variable of interest
    # The variables are inverted so that the correlations show what's correlated to dry conditions, not wet conditions.
    reference_variable = 'PDSI'
    var_mean = -1*scpdsi_mean
    #
    # Load the Palmyra data and determine which years it covers.
    # NOTE(review): hard-coded absolute path — breaks on any other machine.
    # Column 0 holds the year and column 1 the proxy value (rows with NaN in
    # column 1 are treated as "no data").
    palmyra_data = np.loadtxt('/home/mpe32/analysis/5_drought/more/data/data_palmyra.txt')
    palmyra_years_with_data = palmyra_data[~np.isnan(palmyra_data[:,1]),0]
    # NOTE(review): this value is overwritten by the stricter recomputation
    # after the year-bounds subsetting below — this line is a dead store.
    palmyra_years_with_data_inrange = palmyra_years_with_data[(palmyra_years_with_data>=years[0]) & (palmyra_years_with_data<=years[-1])]
    #
    # Shorten the data to cover only the desired years.
    indices_chosen = np.where((years >= year_bounds[0]) & (years <= year_bounds[1]))[0]
    var_mean       = var_mean[indices_chosen,:,:]
    sst_mean       = sst_mean[indices_chosen,:,:]
    zg_500hPa_mean = zg_500hPa_mean[indices_chosen,:,:]
    years          = years[indices_chosen]
    # Palmyra years restricted to both the requested bounds and the (now
    # shortened) span of `years`.
    palmyra_years_with_data_inrange = palmyra_years_with_data[(palmyra_years_with_data>=year_bounds[0]) & (palmyra_years_with_data<=year_bounds[1]) & (palmyra_years_with_data>=years[0]) & (palmyra_years_with_data<=years[-1])]
    #
    # Find the indices of years with/without Palmyra data.
    LMR_indices_palmyra = [years.tolist().index(year) for year in palmyra_years_with_data_inrange]
    LMR_indices_no_palmyra = list(set(range(len(years))) - set(LMR_indices_palmyra))
    #
    # If specified, take either years with Palmyra data or years without
    if year_filter == "only palmyra":
        var_mean       = var_mean[LMR_indices_palmyra,:,:]
        sst_mean       = sst_mean[LMR_indices_palmyra,:,:]
        zg_500hPa_mean = zg_500hPa_mean[LMR_indices_palmyra,:,:]
        years          = years[LMR_indices_palmyra]
    elif year_filter == "no palmyra":
        var_mean       = var_mean[LMR_indices_no_palmyra,:,:]
        sst_mean       = sst_mean[LMR_indices_no_palmyra,:,:]
        zg_500hPa_mean = zg_500hPa_mean[LMR_indices_no_palmyra,:,:]
        years          = years[LMR_indices_no_palmyra]
    #
    # Remove the mean of all years (i.e. convert each field to anomalies).
    var_mean       = var_mean       - np.mean(var_mean,axis=0)
    sst_mean       = sst_mean       - np.mean(sst_mean,axis=0)
    zg_500hPa_mean = zg_500hPa_mean - np.mean(zg_500hPa_mean,axis=0)
    #
    # Compute average PDSI for the U.S. and the four regions used in Cook et al. 2014.
    # Also, Compute means over the approximate California region (for comparison with Seager et al. 2015).
    # For a better comparison, find a less crude way of averaging over California.
    var_regional_mean = {}
    for region in regions:
        print(region)
        lat_min = region_bounds[region][0]
        lat_max = region_bounds[region][1]
        lon_min = region_bounds[region][2]
        lon_max = region_bounds[region][3]
        var_regional_mean[region] = compute_regional_means.compute_means(var_mean,lat,lon,lat_min,lat_max,lon_min,lon_max)
    #
    # For every region, compute the correlation between drought in that region and SSTs and 500hPa heights.
    # NOTE(review): these two `if`s are not exhaustive — any other value of
    # corr_calc_type leaves correlations_* undefined and raises NameError
    # below.  Confirm the allowed values and consider an explicit error.
    if corr_calc_type == 'pearsonr':
        correlations_sst,      _,lat_not_sig_sst,      lon_not_sig_sst       = correlation_calc(var_regional_mean,sst_mean,      lat,lon)
        correlations_zg_500hPa,_,lat_not_sig_zg_500hPa,lon_not_sig_zg_500hPa = correlation_calc(var_regional_mean,zg_500hPa_mean,lat,lon)
    if corr_calc_type == 'isospectral':
        #
        # Set up dictionaries
        correlations_sst = {}
        lat_not_sig_sst  = {}
        lon_not_sig_sst  = {}
        correlations_zg_500hPa = {}
        lat_not_sig_zg_500hPa  = {}
        lon_not_sig_zg_500hPa  = {}
        #
        # Set options for the significance test (1000 isospectral surrogates,
        # 5% significance level).
        options = SET(nsim=1000,method='isospectral',alpha=0.05)
        #
        for region in regions:
            #
            # Compute the significance (timed, since the surrogate test is slow).
            starttime = time.time()
            correlations_sst[region],      lat_not_sig_sst[region],      lon_not_sig_sst[region]       = corr_2d_ttest(sst_mean,      var_regional_mean[region],lat,lon,options,1)
            correlations_zg_500hPa[region],lat_not_sig_zg_500hPa[region],lon_not_sig_zg_500hPa[region] = corr_2d_ttest(zg_500hPa_mean,var_regional_mean[region],lat,lon,options,1)
            endtime = time.time()
            print('Time for isospectral calculation: '+str('%1.2f' % ((endtime-starttime)/60))+' minutes')
    #
    # Remove points over land for sst
    lat_not_sig_sst,lon_not_sig_sst = remove_points_over_land(sst_mean,lat_not_sig_sst,lon_not_sig_sst,lat,lon)
    #
    #
    ### FIGURES
    plt.style.use('ggplot')
    #
    # Map covering the region given by the global calc_bounds
    # [lat_min, lat_max, lon_min, lon_max].
    m = Basemap(projection='cyl',llcrnrlat=calc_bounds[0],urcrnrlat=calc_bounds[1],llcrnrlon=calc_bounds[2],urcrnrlon=calc_bounds[3],resolution='c')
    lon_2d,lat_2d = np.meshgrid(lon,lat)
    x, y = m(lon_2d,lat_2d)
    # Panel letters are interleaved so SST panels get a,b,c,d and height
    # panels get e,f,g,h (panels are laid out regions x variables below).
    letters = ['a','e','b','f','c','g','d','h']
    #
    # Plot the correlation between the region of interest and the selected variable everywhere.
    f, ax = plt.subplots(len(regions),2,figsize=(14,14))
    ax = ax.ravel()
    #
    for var_num,variable in enumerate(['SST','500hPa heights']):
        if variable == 'SST':
            correlations_selected = correlations_sst
            lat_not_sig_selected  = lat_not_sig_sst
            lon_not_sig_selected  = lon_not_sig_sst
        elif variable == '500hPa heights':
            correlations_selected = correlations_zg_500hPa
            lat_not_sig_selected  = lat_not_sig_zg_500hPa
            lon_not_sig_selected  = lon_not_sig_zg_500hPa
        #
        # Set max and min values for the color scale (a tighter scale for one
        # specific dataset/period combination).
        if (dataset == 'LMR NAonly/NAexclude') and (year_bounds == [1001,2000]): extreme_r = .2
        else:                                                                    extreme_r = 1
        #
        levels_r = np.linspace(-1*extreme_r,extreme_r,21)
        #
        for i,region in enumerate(regions):
            #
            # Panels are filled column-major: row = region, column = variable.
            panel = (i*2)+var_num
            #
            ax[panel].set_title(letters[panel]+") "+region+" region, "+variable,fontsize=20,loc='left')
            m = Basemap(projection='cyl',llcrnrlat=calc_bounds[0],urcrnrlat=calc_bounds[1],llcrnrlon=calc_bounds[2],urcrnrlon=calc_bounds[3],resolution='c',ax=ax[panel])
            if extreme_r == 1: image1 = m.contourf(x,y,correlations_selected[region],levels_r,cmap='RdBu_r',vmin=-1*extreme_r,vmax=extreme_r)
            else:              image1 = m.contourf(x,y,correlations_selected[region],levels_r,extend='both',cmap='RdBu_r',vmin=-1*extreme_r,vmax=extreme_r)
            #
            # Stipple the points that are not statistically significant.
            x2, y2 = m(lon_not_sig_selected[region],lat_not_sig_selected[region])
            m.plot(x2,y2,'ko',markersize=.5)
            # NOTE(review): set_label() returns None, so cb is always None —
            # harmless since cb is never used, but misleading.
            cb = m.colorbar(image1,ax=ax[panel],location='bottom').set_label("Correlation",fontsize=18)
            m.drawparallels([0],labels=[True])
            m.drawcoastlines()
            #
            # Draw a box for the region of interest
            lat_min = region_bounds[region][0]
            lat_max = region_bounds[region][1]
            lon_min = region_bounds[region][2]
            lon_max = region_bounds[region][3]
            # Wrap negative longitudes into [0, 360) to match the map grid.
            if lon_min < 0: lon_min = lon_min+360
            if lon_max < 0: lon_max = lon_max+360
            x_region,y_region = m([lon_min,lon_min,lon_max,lon_max],[lat_min,lat_max,lat_max,lat_min])
            xy_region = np.column_stack((x_region,y_region))
            region_box = Polygon(xy_region,edgecolor='black',facecolor='none',linewidth=2,alpha=.5)
            plt.gca().add_patch(region_box)
    #
#    f.suptitle("Correlations between regional drought and spatial climate everywhere, "+dataset+", years "+str(year_bounds[0])+"-"+str(year_bounds[1]),fontsize=20)
    f.tight_layout()
#    f.subplots_adjust(top=.93)
    if save_instead_of_plot == True:
        plt.savefig("figures/correlation_map_"+dataset.replace(" ","_").replace("/","_")+"_"+reference_variable+"_years_"+str(year_bounds[0])+"-"+str(year_bounds[1])+"_"+year_filter.replace(" ","_")+".png",dpi=300,format='png')
        plt.close()
    else:
        plt.show()
    #
    # Put the desired variables together into a single dictionary
    correlation_variables = {}
    correlation_variables['correlations_sst']       = correlations_sst
    correlation_variables['lat_not_sig_sst']        = lat_not_sig_sst
    correlation_variables['lon_not_sig_sst']        = lon_not_sig_sst
    correlation_variables['correlations_zg_500hPa'] = correlations_zg_500hPa
    correlation_variables['lat_not_sig_zg_500hPa']  = lat_not_sig_zg_500hPa
    correlation_variables['lon_not_sig_zg_500hPa']  = lon_not_sig_zg_500hPa
    correlation_variables['lat']                    = lat
    correlation_variables['lon']                    = lon
    correlation_variables['dataset']                = dataset
    correlation_variables['year_bounds']            = year_bounds
    correlation_variables['sst_mean']               = sst_mean
    correlation_variables['var_regional_mean']      = var_regional_mean
    #
    # Return the calculated values
    return correlation_variables
            nino34_ccsm4_lead1_selected, scpdsi_ccsm4_selected[:, j, i])[0, 1]

# Regional-mean PDSI for the observational (Dai), LMR, and CCSM4 fields.
pdsi_dai_mean = {}
pdsi_lmr_mean = {}
pdsi_ccsm4_mean = {}
for region in possible_regions:
    #
    print('Averaging over ' + region)
    lat_min, lat_max, lon_min, lon_max = region_bounds[region][:4]
    #
    # Each dataset has its own grid, so average each on its native lat/lon.
    datasets_to_average = (
        (pdsi_dai_mean, pdsi_dai_annual_selected, lat_dai, lon_dai),
        (pdsi_lmr_mean, scpdsi_lmr_selected, lat_lmr, lon_lmr),
        (pdsi_ccsm4_mean, scpdsi_ccsm4_selected, lat_ccsm4, lon_ccsm4),
    )
    for target, field, lat_grid, lon_grid in datasets_to_average:
        target[region] = compute_regional_means.compute_means(
            field, lat_grid, lon_grid, lat_min, lat_max, lon_min, lon_max)

# Containers for the regional correlations (and R^2 values), one dict each.
(r_regions_obs_annual,
 r_regions_obs_annual_lead1,
 r_regions_obs_mam,
 r_regions_obs_djf,
 r_regions_lmr_annual,
 r_regions_lmr_annual_lead1) = ({} for _ in range(6))
# Load the volcanic forcing data, which runs from December, 500, to January, 2001.
# The context manager closes the file once the arrays have been pulled out.
with xr.open_dataset(data_dir+'forcings/IVI2LoadingLatHeight501-2000_L18_c20100518.nc',decode_times=False) as handle_volc:
    colmass_volc = handle_volc['colmass'].values
    date_volc = handle_volc['date'].values
    lat_volc = handle_volc['lat'].values

# Calendar years covered by the forcing record.
years_volc = np.arange(501, 2001)

### CALCULATIONS

# Regional-mean PDSI over the southwest U.S. for every experiment.
for experiment in experiments:
    pdsi[experiment+'_annual_sw'] = compute_regional_means.compute_means(pdsi[experiment+'_annual'],lat,lon,region_bounds_sw[0],region_bounds_sw[1],region_bounds_sw[2],region_bounds_sw[3])

# Stack the southwest means from every simulation into one (nexperiments, nyears) array.
nyears = len(years)
pdsi_sw_allsims = np.full((nexperiments, nyears), np.nan)
for row, experiment in enumerate(experiments):
    pdsi_sw_allsims[row, :] = pdsi[experiment+'_annual_sw']

# Ensemble mean across all simulations.
pdsi_sw_mean = np.mean(pdsi_sw_allsims, axis=0)


### VOLCANIC CALCULATIONS

# Area weights for a global mean of the latitudinal data: cos(latitude).
lat_volc_weights = np.cos(np.deg2rad(lat_volc))
예제 #5
0
# Exchange the first and last axes of the NADA field so time comes first.
pdsi_nada = np.swapaxes(pdsi_nada, 0, 2)

# Restrict NADA to the years covered by the CESM simulations.
nada_in_range = (years_nada >= years[0]) & (years_nada <= years[-1])
years_nada_indices = np.nonzero(nada_in_range)[0]
pdsi_nada_selected = pdsi_nada[years_nada_indices, :, :]

# Restrict the LMR to the same period.
lmr_in_range = (years_lmr >= years[0]) & (years_lmr <= years[-1])
years_lmr_indices = np.nonzero(lmr_in_range)[0]
scpdsi_lmr_allmeans_selected = scpdsi_lmr_allmeans[years_lmr_indices, :, :, :]

# Regional-mean PDSI over the southwest U.S. for each CESM experiment.
sw_lat_min, sw_lat_max, sw_lon_min, sw_lon_max = (
    region_bounds_sw[0], region_bounds_sw[1],
    region_bounds_sw[2], region_bounds_sw[3])
for experiment in experiments:
    pdsi[experiment + '_annual_sw'] = compute_regional_means.compute_means(
        pdsi[experiment + '_annual'], lat, lon,
        sw_lat_min, sw_lat_max, sw_lon_min, sw_lon_max)

# Compute mean southwest-U.S. PDSI for every iteration of the LMR.
# The 20 iteration labels 'lmr01'..'lmr20' are generated instead of hand-typed,
# and enumerate() replaces the range(len(...)) anti-pattern: the label's index
# in the list is also the iteration axis index in scpdsi_lmr_allmeans_selected.
lmr_experiments = ['lmr%02d' % k for k in range(1, 21)]
for i, lmr_name in enumerate(lmr_experiments):
    pdsi[lmr_name + '_annual_sw'] = compute_regional_means.compute_means(
        scpdsi_lmr_allmeans_selected[:, i, :, :], lat_lmr, lon_lmr,
        region_bounds_sw[0], region_bounds_sw[1], region_bounds_sw[2],
        region_bounds_sw[3])