def plot_nao_correlations():
    """Correlate extracted components with two NAO indices and plot the result.

    Loads the NH extratropical monthly SLP anomalies, the previously computed
    component means (from FILE_NAME_COMPS) and two NAO index series (PC-based
    and Hurrell station-based), computes the correlation of each component
    time series with both indices, plots them and saves the figure to
    figs/slp_nh_nao_correlation.pdf.
    """
    # load geo-field: NH (20N-89N), monthly-variance-normalized anomalies
    gf = GeoField()
    gf.load('data/pres.mon.mean.nc', 'pres')
    gf.transform_to_anomalies()
    gf.normalize_monthly_variance()
    gf.slice_spatial(None, [20, 89])
    gf.slice_date_range(date(1950, 1, 1), date(2012, 3, 1))

    with open(FILE_NAME_COMPS, 'r') as f:
        d = cPickle.load(f)

    # unroll the data: (time, lat, lon) -> (space, time)
    data = gf.data()
    data = np.transpose(np.reshape(data, (data.shape[0], data.shape[1] * data.shape[2])))

    # load the monthly NAO indices (third column holds the index value)
    nao = np.loadtxt('data/nao_index.tim.txt', skiprows = 0)
    naoh = np.loadtxt('data/nao_index_hurrel.tim.txt', skiprows = 0)
    naoh_ndx = naoh[:, 2]
    nao_ndx = nao[:, 2]
    print(nao_ndx.shape)
    print(naoh_ndx.shape)

    # truncate both indices to a common length
    ts_len = min(len(nao_ndx), len(naoh_ndx))
    nao_ndx = nao_ndx[:ts_len]
    naoh_ndx = naoh_ndx[:ts_len]

    # normalize each component (column) to a unit vector
    mn = d['mean']
    mn = mn / np.sum(mn**2, axis = 0) ** 0.5
    Nc = mn.shape[1]

    # project the data onto the components -> component time series
    ts = np.transpose(np.dot(mn.T, data))
    ts = ts[:ts_len, :]

    Cnao = np.zeros((Nc,))
    Cnaoh = np.zeros((Nc,))
    for i in range(Nc):
        # BUGFIX: the original passed rowvar = False, which treats each time
        # point as a variable and each series as an observation; with only two
        # observations the [0,1] entry is always +/-1.  The default
        # (rowvar = True) correlates the two 1-D series as intended.
        Cnao[i] = np.corrcoef(nao_ndx, ts[:, i])[0, 1]
        Cnaoh[i] = np.corrcoef(naoh_ndx, ts[:, i])[0, 1]

    f = plt.figure()
    plt.plot(np.arange(Nc) + 1, Cnao, 'ro-')
    plt.plot(np.arange(Nc) + 1, Cnaoh, 'go-')
    plt.legend(('NAO/PC', 'NAO/Stat.'))
    plt.xlabel('Component index [-]')
    plt.ylabel('NAO correlation [-]')
    f.savefig('figs/slp_nh_nao_correlation.pdf')

    # report 1-based component indices to match the plot's x-axis
    print('Max station NAO correlation: %g at %d' % (np.amax(np.abs(Cnaoh)), np.argmax(np.abs(Cnaoh)) + 1))
    print('Max PC/NAO correlation: %g at %d' % (np.amax(np.abs(Cnao)), np.argmax(np.abs(Cnao)) + 1))
def load_monthly_data_general(fname, varname, from_date, to_date, months, slice_lon, slice_lat, level, var_norm = True):
    """Load a monthly NetCDF field and run the standard preprocessing chain.

    Loads `varname` from `fname`, optionally selects a single `level`,
    converts to anomalies (with optional variance normalization), restricts
    the spatial window to `slice_lon`/`slice_lat` and the time axis to
    [from_date, to_date], and optionally keeps only the listed `months`.
    Returns the prepared GeoField.
    """
    field = GeoField()
    field.load(fname, varname)

    # level selection is skipped for single-level datasets
    if level is not None:
        field.slice_level(level)

    field.transform_to_anomalies()
    if var_norm:
        field.normalize_variance()

    field.slice_spatial(slice_lon, slice_lat)
    field.slice_date_range(from_date, to_date)

    # months is None -> keep the full annual cycle
    if months is not None:
        field.slice_months(months)

    return field
def load_daily_data_general(fname, varname, from_date, to_date, slice_lon, slice_lat, level):
    """Load daily data stored in per-year NetCDF files and concatenate them.

    `fname` is a format string taking the year (e.g. 'air.%d.nc').  Each
    yearly file is loaded, optionally level-sliced, spatially and temporally
    restricted, and the results are concatenated along the time axis into a
    single GeoField, which is then converted to variance-normalized anomalies.
    """
    # the daily data is stored in yearly files
    first_year = from_date.year
    last_year = to_date.year

    # load and slice every yearly dataset, counting the total number of days
    yearly = []
    total_days = 0
    for year in range(first_year, last_year + 1):
        gf = GeoField()
        gf.load(fname % year, varname)
        if level is not None:
            gf.slice_level(level)
        gf.slice_spatial(slice_lon, slice_lat)
        gf.slice_date_range(from_date, to_date)
        total_days += len(gf.tm)
        yearly.append(gf)

    # concatenate the data cubes and time axes of the yearly fields
    first = yearly[0]
    data = np.zeros((total_days, len(first.lats), len(first.lons)))
    times = np.zeros((total_days,))
    offset = 0
    for gf in yearly:
        ndays = len(gf.tm)
        data[offset:offset + ndays, :, :] = gf.d
        times[offset:offset + ndays] = gf.tm
        offset += ndays

    # wrap the merged arrays into a fresh GeoField and post-process
    merged = GeoField()
    merged.use_existing(data, first.lons, first.lats, times)
    merged.transform_to_anomalies()
    merged.normalize_variance()
    return merged
# Benchmark script: times the preparation of AR surrogate models for the
# monthly SLP field and two subsequent surrogate generations.
from datetime import datetime, date
from surr_geo_field_ar import SurrGeoFieldAR
from geo_field import GeoField
from multiprocessing import Pool

# load netCDF SLP field (monthly means, 1948/01 - 2012/01, near-global)
d = GeoField()
d.load("/home/martin/Work/Geo/data/netcdf/pres.mon.mean.nc", 'pres')
d.slice_date_range(date(1948, 1, 1), date(2012, 1, 1))
#d.slice_months([12, 1, 2])
d.slice_spatial(None, [-89, 89])

# copy into surrogate field
sd = SurrGeoFieldAR()
sd.copy_field(d)

# create the Pool (4 worker processes for model fitting)
pool = Pool(4)

# time the (parallel) AR model preparation
t1 = datetime.now()
sd.prepare_surrogates(pool)
print("Prep: elapsed time %s" % str(datetime.now() - t1))

# time surrogate generation
t1 = datetime.now()
sd.construct_surrogate()
print("Gen: elapsed time %s" % str(datetime.now() - t1))

# time a second generation run — presumably to measure warm timing after the
# first call; TODO confirm this duplication is intentional
t1 = datetime.now()
sd.construct_surrogate()
print("Gen: elapsed time %s" % str(datetime.now() - t1))
# reorder the bootstrap components according to the best matching Ur = Ur[:, perm] return Ur def render_set_par(x): render_component_set(*x) # load up the monthly SLP geo-field gf = GeoField() gf.load("data/pres.mon.mean.nc", 'pres') gf.transform_to_anomalies() gf.normalize_monthly_variance() gf.slice_date_range(date(1948, 1, 1), date(2012, 1, 1)) # years 1948-2012 #gf.slice_spatial(None, [20, 87]) # northern hemisphere, extratropical gf.slice_spatial(None, [-88, 88]) #gf.slice_months([12, 1, 2]) #S = np.zeros(shape = (5, 10), dtype = np.int32) #S[1:4, 0:2] = 1 #S[0:3, 6:9] = 2 #v, Sr = constructVAR(S, [0.0, 0.191, 0.120], [-0.1, 0.1], [0.00, 0.00], [0.01, 0.01]) #ts = v.simulate(768) #gf = make_model_geofield(S, ts) # initialize a parallel pool pool = Pool(POOL_SIZE) # compute components for data
# BUGFIX: `date` was used below but never imported in this file — added the
# datetime import so slice_date_range() no longer raises a NameError.
from datetime import date

from geo_field import GeoField
import numpy as np
import cPickle
import matplotlib.pyplot as plt
import scipy.io as sio

if __name__ == '__main__':
    # load geo-field: NH (20N-89N) monthly SLP anomalies,
    # monthly-variance-normalized, 1950/01 - 2012/03
    gf = GeoField()
    gf.load('data/pres.mon.mean.nc', 'pres')
    gf.transform_to_anomalies()
    gf.normalize_monthly_variance()
    gf.slice_spatial(None, [20, 89])
    gf.slice_date_range(date(1950, 1, 1), date(2012, 3, 1))

    # load the components from the bootstrap results
    with open('results/slp_nh_var_bootstrap_results_b1000_cosweights.bin', 'r') as f:
        d = cPickle.load(f)

    # convert each component (column) to a unit vector
    mn = d['mean']
    mn = mn / np.sum(mn**2, axis = 0) ** 0.5

    # mark maxima: one-hot matrix with a 1 at each component's
    # largest-magnitude grid point
    mx_pos = np.argmax(mn**2, axis = 0)
    print(mx_pos)
    mx_loc = np.zeros_like(mn)
    mx_loc[(mx_pos, np.arange(mn.shape[1]))] = 1.0
# Benchmark script: times the preparation of AR surrogate models for the
# monthly SLP field and one surrogate generation.
from datetime import datetime, date
from surr_geo_field_ar import SurrGeoFieldAR
from geo_field import GeoField
from multiprocessing import Pool

# load netCDF SLP field (monthly means, 1948/01 - 2012/01, near-global)
d = GeoField()
d.load("/home/martin/Work/Geo/data/netcdf/pres.mon.mean.nc", 'pres')
d.slice_date_range(date(1948, 1, 1), date(2012, 1, 1))
#d.slice_months([12, 1, 2])
d.slice_spatial(None, [-89, 89])

# copy into surrogate field
sd = SurrGeoFieldAR()
sd.copy_field(d)

# create the Pool (4 worker processes for model fitting)
pool = Pool(4)

# time the (parallel) AR model preparation
t1 = datetime.now()
sd.prepare_surrogates(pool)
print("Prep: elapsed time %s" % str(datetime.now() - t1))

# time surrogate generation
t1 = datetime.now()
sd.construct_surrogate()
print("Gen: elapsed time %s" % str(datetime.now() - t1))
def plot_nao_correlations():
    """Correlate extracted components with two NAO indices and plot the result.

    Loads the NH extratropical monthly SLP anomalies, the previously computed
    component means (from FILE_NAME_COMPS) and two NAO index series (PC-based
    and Hurrell station-based), computes the correlation of each component
    time series with both indices, plots them and saves the figure to
    figs/slp_nh_nao_correlation.pdf.
    """
    # load geo-field: NH (20N-89N), monthly-variance-normalized anomalies
    gf = GeoField()
    gf.load('data/pres.mon.mean.nc', 'pres')
    gf.transform_to_anomalies()
    gf.normalize_monthly_variance()
    gf.slice_spatial(None, [20, 89])
    gf.slice_date_range(date(1950, 1, 1), date(2012, 3, 1))

    with open(FILE_NAME_COMPS, 'r') as f:
        d = cPickle.load(f)

    # unroll the data: (time, lat, lon) -> (space, time)
    data = gf.data()
    data = np.transpose(
        np.reshape(data, (data.shape[0], data.shape[1] * data.shape[2])))

    # load the monthly NAO indices (third column holds the index value)
    nao = np.loadtxt('data/nao_index.tim.txt', skiprows=0)
    naoh = np.loadtxt('data/nao_index_hurrel.tim.txt', skiprows=0)
    naoh_ndx = naoh[:, 2]
    nao_ndx = nao[:, 2]
    print(nao_ndx.shape)
    print(naoh_ndx.shape)

    # truncate both indices to a common length
    ts_len = min(len(nao_ndx), len(naoh_ndx))
    nao_ndx = nao_ndx[:ts_len]
    naoh_ndx = naoh_ndx[:ts_len]

    # normalize each component (column) to a unit vector
    mn = d['mean']
    mn = mn / np.sum(mn**2, axis=0)**0.5
    Nc = mn.shape[1]

    # project the data onto the components -> component time series
    ts = np.transpose(np.dot(mn.T, data))
    ts = ts[:ts_len, :]

    Cnao = np.zeros((Nc, ))
    Cnaoh = np.zeros((Nc, ))
    for i in range(Nc):
        # BUGFIX: the original passed rowvar=False, which treats each time
        # point as a variable and each series as an observation; with only two
        # observations the [0, 1] entry is always +/-1.  The default
        # (rowvar=True) correlates the two 1-D series as intended.
        Cnao[i] = np.corrcoef(nao_ndx, ts[:, i])[0, 1]
        Cnaoh[i] = np.corrcoef(naoh_ndx, ts[:, i])[0, 1]

    f = plt.figure()
    plt.plot(np.arange(Nc) + 1, Cnao, 'ro-')
    plt.plot(np.arange(Nc) + 1, Cnaoh, 'go-')
    plt.legend(('NAO/PC', 'NAO/Stat.'))
    plt.xlabel('Component index [-]')
    plt.ylabel('NAO correlation [-]')
    f.savefig('figs/slp_nh_nao_correlation.pdf')

    # report 1-based component indices to match the plot's x-axis
    print('Max station NAO correlation: %g at %d' %
          (np.amax(np.abs(Cnaoh)), np.argmax(np.abs(Cnaoh)) + 1))
    print('Max PC/NAO correlation: %g at %d' %
          (np.amax(np.abs(Cnao)), np.argmax(np.abs(Cnao)) + 1))
# reorder the bootstrap components according to the best matching Ur = Ur[:, perm] return Ur def render_set_par(x): render_component_set(*x) # load up the monthly SLP geo-field gf = GeoField() gf.load("data/pres.mon.mean.nc", 'pres') gf.transform_to_anomalies() gf.normalize_monthly_variance() gf.slice_date_range(date(1948, 1, 1), date(2012, 1, 1)) # years 1948-2012 #gf.slice_spatial(None, [20, 87]) # northern hemisphere, extratropical gf.slice_spatial(None, [-88, 88]) #gf.slice_months([12, 1, 2]) #S = np.zeros(shape = (5, 10), dtype = np.int32) #S[1:4, 0:2] = 1 #S[0:3, 6:9] = 2 #v, Sr = constructVAR(S, [0.0, 0.191, 0.120], [-0.1, 0.1], [0.00, 0.00], [0.01, 0.01]) #ts = v.simulate(768) #gf = make_model_geofield(S, ts) # initialize a parallel pool pool = Pool(POOL_SIZE)