# NOTE(review): Python 2 fragment (print statements, cPickle), collapsed onto one
# physical line and truncated at BOTH ends — it starts mid-loop-body and stops inside
# an unfinished `cPickle.dump(` call. It duplicates the SATA-autocoherence loop body
# seen in the `if not PLOT:` section below; PERIOD, AVG, WORKERS, _get_autocoherence
# and the imports (Pool, np, cPickle, date, ScaleSpecificNetwork) are defined
# elsewhere in the file — confirm against the full source.
# What the visible code does: builds a ScaleSpecificNetwork from NCEP monthly air
# temperature (anomalised), computes the wavelet (with amplitude) at scale PERIOD
# using a worker pool, obtains filtered data, then maps _get_autocoherence over every
# (lat, lon) grid point with window int(AVG * 12 * PERIOD) months, collects results
# into the `autocoherence` array, and begins pickling them to a networks/*.bin file.
net = ScaleSpecificNetwork( '/home/nikola/Work/phd/data/air.mon.mean.levels.nc', 'air', date(1948, 1, 1), date(2014, 1, 1), None, None, 0, 'monthly', anom=True) pool = Pool(WORKERS) net.wavelet(PERIOD, get_amplitude=True, pool=pool) print "wavelet on data done" net.get_filtered_data(pool=pool) print "filtered data acquired" autocoherence = np.zeros(net.get_spatial_dims()) job_args = [(i, j, int(AVG * 12 * PERIOD), net.filtered_data[:, i, j]) for i in range(net.lats.shape[0]) for j in range(net.lons.shape[0])] job_result = pool.map(_get_autocoherence, job_args) del job_args pool.close() for i, j, res in job_result: autocoherence[i, j] = res del job_result with open( "networks/NCEP-SATAsurface-autocoherence-filtered-scale%dyears-avg-to-%.1f.bin" % (PERIOD, AVG), "wb") as f: cPickle.dump(
# NOTE(review): Python 2, collapsed onto one physical line; the original indentation
# is lost. Guarded by `if not PLOT:` — for every (PERIOD, AVG) combination from
# `periods` x `avg_to` (defined elsewhere) it: loads NCEP monthly surface air
# temperature 1948-2014 as anomalies, runs the wavelet at scale PERIOD (with
# amplitude) on a Pool(WORKERS), extracts filtered data, maps _get_autocoherence
# over all (lat, lon) points with an averaging window of int(AVG*12*PERIOD) months,
# and pickles {'autocoherence', 'lats', 'lons'} to a per-(PERIOD, AVG) networks/*.bin
# file. A fresh Pool is created and close()d each iteration.
# NOTE(review): the progress print formats 12*AVG*PERIOD with %d while the output
# filename formats AVG with %.1f — if AVG is fractional, %d silently truncates the
# printed month count (and will raise under some float/%d combinations); confirm
# whether avg_to holds ints or floats. The chunk is truncated at the end: the
# SAT (non-anomaly) section stops at a dangling `for PERIOD in periods:`.
if not PLOT: ## autocoherence filtered data - SATA print "computing autocoherence for SATA filtered data" for PERIOD in periods: for AVG in avg_to: print("computing for %d year period and averaging up to %d" % (PERIOD, 12*AVG*PERIOD)) net = ScaleSpecificNetwork('/home/nikola/Work/phd/data/air.mon.mean.levels.nc', 'air', date(1948,1,1), date(2014,1,1), None, None, 0, 'monthly', anom = True) pool = Pool(WORKERS) net.wavelet(PERIOD, get_amplitude = True, pool = pool) print "wavelet on data done" net.get_filtered_data(pool = pool) print "filtered data acquired" autocoherence = np.zeros(net.get_spatial_dims()) job_args = [ (i, j, int(AVG*12*PERIOD), net.filtered_data[:, i, j]) for i in range(net.lats.shape[0]) for j in range(net.lons.shape[0]) ] job_result = pool.map(_get_autocoherence, job_args) del job_args pool.close() for i, j, res in job_result: autocoherence[i, j] = res del job_result with open("networks/NCEP-SATAsurface-autocoherence-filtered-scale%dyears-avg-to-%.1f.bin" % (PERIOD, AVG), "wb") as f: cPickle.dump({'autocoherence' : autocoherence, 'lats' : net.lats, 'lons' : net.lons}, f, protocol = cPickle.HIGHEST_PROTOCOL) ## autocoherence filtered data - SAT print "computing autocoherence for SAT filtered data" for PERIOD in periods:
# NOTE(review): collapsed one-line fragment; `raw`, `nao`, `net`, pearsonr and
# get_single_FT_surrogate are defined outside this view. What the visible code does:
# flattens `raw` into the NAO series, stamps it with a monthly time axis starting
# 1865-01-01, restricts to 1949-01-01..2015-01-01, anomalises, and selects the
# January-February-March indices without modifying the data. It then averages the
# JFM NAO values per calendar year into `ann_nao`, builds `ann_phase_fluc` — the
# per-year JFM mean of `net.phase` at every (lat, lon) — and fills `corrs` with the
# Pearson correlation of ann_nao against each grid point's annual phase fluctuation.
# NOTE(review): a near-duplicate of this fragment appears later with
# date_from=date(1821,1,1) and end date(2014,1,1) instead of 1865/2015 — presumably
# two NAO datasets with different start years; confirm both date pairs are intended.
# The trailing `def _corrs_surrs_ind(args):` (FT-surrogate correlation worker) is
# cut off mid-body by the chunk boundary.
nao.data = raw.reshape(-1) nao.create_time_array(date_from = date(1865, 1, 1), sampling = 'm') nao.select_date(date(1949, 1, 1), date(2015, 1, 1)) nao.anomalise() jfm_index = nao.select_months([1,2,3], apply_to_data = False) jfm_nao = nao.data[jfm_index] _, _, y = nao.extract_day_month_year() y = y[jfm_index] ann_nao = [] for year in np.unique(y): ann_nao.append(np.mean(jfm_nao[np.where(year == y)[0]])) ann_nao = np.array(ann_nao) ann_phase_fluc = np.zeros([ann_nao.shape[0]] + list(net.get_spatial_dims())) for lat in range(net.lats.shape[0]): for lon in range(net.lons.shape[0]): jfm_data = net.phase[jfm_index, lat, lon] for i, year in zip(range(np.unique(y).shape[0]), np.unique(y)): ann_phase_fluc[i, lat, lon] = np.mean(jfm_data[np.where(year == y)[0]]) corrs = np.zeros_like(net.data[0, ...]) for lat in range(net.lats.shape[0]): for lon in range(net.lons.shape[0]): corrs[lat, lon] = pearsonr(ann_nao, ann_phase_fluc[:, lat, lon])[0] def _corrs_surrs_ind(args): nao_surr = nao.copy() nao_surr.data = get_single_FT_surrogate(nao.data)
# NOTE(review): near-duplicate of the preceding NAO fragment, collapsed onto one
# line; only the dates differ — monthly time axis starts 1821-01-01 (vs 1865) and
# the selection ends 2014-01-01 (vs 2015). Presumably this targets a second, longer
# NAO record; verify the two date pairs against the data files actually loaded into
# `raw` (defined outside this view).
# Visible behavior: flatten `raw` into `nao`, restrict to 1949..2014, anomalise,
# take JFM indices (apply_to_data=False leaves nao.data intact), average JFM NAO per
# year into `ann_nao`, compute per-year JFM means of `net.phase` at every (lat, lon)
# into `ann_phase_fluc`, and store the Pearson r of ann_nao vs each grid point's
# series in `corrs`. The trailing `def _corrs_surrs_ind(args):` is truncated by the
# chunk boundary (only `nao_surr = nao.copy()` is visible).
nao.data = raw.reshape(-1) nao.create_time_array(date_from=date(1821, 1, 1), sampling='m') nao.select_date(date(1949, 1, 1), date(2014, 1, 1)) nao.anomalise() jfm_index = nao.select_months([1, 2, 3], apply_to_data=False) jfm_nao = nao.data[jfm_index] _, _, y = nao.extract_day_month_year() y = y[jfm_index] ann_nao = [] for year in np.unique(y): ann_nao.append(np.mean(jfm_nao[np.where(year == y)[0]])) ann_nao = np.array(ann_nao) ann_phase_fluc = np.zeros([ann_nao.shape[0]] + list(net.get_spatial_dims())) for lat in range(net.lats.shape[0]): for lon in range(net.lons.shape[0]): jfm_data = net.phase[jfm_index, lat, lon] for i, year in zip(range(np.unique(y).shape[0]), np.unique(y)): ann_phase_fluc[i, lat, lon] = np.mean(jfm_data[np.where(year == y)[0]]) corrs = np.zeros_like(net.data[0, ...]) for lat in range(net.lats.shape[0]): for lon in range(net.lons.shape[0]): corrs[lat, lon] = pearsonr(ann_nao, ann_phase_fluc[:, lat, lon])[0] def _corrs_surrs_ind(args): nao_surr = nao.copy()