# NOTE(review): this chunk begins mid-expression -- the opening '[' of this
# list of ice-shelf names lies above the visible region.
'George_S', 'George_N', 'Larsen_B', 'Larsen_C', 'Larsen_D',
'Ronne', 'Filchner', 'Brunt', 'Riiser', 'All_Antarctica',
]

# --- load cross-calibrated elevation-change grids (Python 2 script) ---
print 'loading data...'
fin = tb.openFile(DIR + FILE_IN)
# The time node may hold date numbers (convert to decimal years) or may
# already be usable as-is; fall back to the raw values if conversion fails.
# NOTE(review): bare 'except' silently hides unrelated errors -- narrow it
# to the specific exception ap.num2year raises when possible.
try:
    time = ap.num2year(fin.root.time[:])
except:
    time = fin.root.time[:]
lon = fin.root.lon[:]
lat = fin.root.lat[:]
# 3-D grid; presumably (time, lat, lon) given the nz, ny, nx unpacking
# below -- TODO confirm against the file's layout.
d = fin.root.dh_mean_mixed_const_xcal[:]
#d = fin.root.dg_mean_xcal[:]
#e = fin.root.dh_error_xcal[:]
#d = fin.root.n_ad_xcal[:]
nz, ny, nx = d.shape
dt = ap.year2date(time)  # decimal years -> date objects

if 1:
    # load area info: per-ice-shelf sampled area (km^2), indexed by shelf name
    df_temp = pd.read_csv('ice_shelf_area4.csv')
    df_area = df_temp['sampled_km2']
    df_area.index = df_temp['ice_shelf']
# read firn print 'reading firn...' f1 = nc.Dataset(FILE_FIRN) d = f1.variables h_firn = d['zs'] #[:] t_firn = d['time'][:] # years lon_firn = d['lon'][:] # 2d lat_firn = d['lat'][:] # 2d # read altim print 'reading altim...' f2 = tb.openFile(FILE_ALTIM, 'a') h_altim = f2.root.dh_mean_mixed_const_xcal[:-1,...] # minus last to agree with firn ts t_altim = ap.num2year(f2.root.time_xcal[:-1]) # years lat_altim = f2.root.lat[:] # 1d lon_altim = f2.root.lon[:] # 1d lon_altim = ap.lon_180_360(lon_altim, inverse=True) # get lon/lat only of complete (no gaps) altim ts h_altim = h_altim.sum(axis=0) # 3d -> 2d i_altim, j_altim = np.where(~np.isnan(h_altim)) # 2d indices lonlat_altim = np.column_stack((lon_altim[j_altim], lat_altim[i_altim])) # find nearest lon/lat in the firn model i_firn, j_firn = ap.find_nearest2(lon_firn, lat_firn, lonlat_altim) # find nearest altim times in the firn times k_firn, = ap.find_nearest(t_firn, t_altim)
# NOTE(review): this chunk begins inside a function whose 'def' line and
# enclosing loops are above the visible region; the indentation below
# (two loop levels over i, j inside a function returning dhdt) is a
# reconstruction -- confirm against the full file.
            ii, = np.where(~np.isnan(dh[:, i, j]))
            # require at least 8 valid epochs before fitting a trend
            if len(ii) < 8:
                continue
            # TREND
            m, c = ap.linear_fit(year, dh[:, i, j], return_coef=True)
            dhdt[i, j] = m
            # NOTE(review): 'continue' is the last statement of the loop
            # body, so this check has no effect -- dead code, confirm intent.
            if m == 0:
                continue
    return dhdt


ap.rcparams()

# --- load cross-calibrated elevation-change grids (Python 2 script) ---
print 'loading data...'
f = tb.openFile(DIR + FNAME)
time = ap.num2year(f.root.time_xcal[:])  # decimal years
lon = f.root.lon[:]
lat = f.root.lat[:]
xed = f.root.x_edges[:]
yed = f.root.y_edges[:]
h0 = f.root.dh_mean_xcal[:]
h = f.root.dh_mean_xcal_interp_short_const[:]
#h = f.root.dh_mean_short_const_xcal[:]
h2 = f.root.dh_mean_xcal_short_const[:]
#h2 = f.root.dh_mean_short_const_xcal[:]
# NOTE(review): g and g2 read the same node (dg_mean_xcal); one of them
# probably should read a different variable -- confirm.
g = f.root.dg_mean_xcal[:]
g2 = f.root.dg_mean_xcal[:]
dt = ap.year2date(time)  # decimal years -> date objects

# add interpolated grid-cells
ind = np.where(~np.isnan(h2))
def main():
    """Cross-calibrate satellite altimetry time series (Python 2 script).

    Reads the variable named by VAR_TO_CALIBRATE from the HDF5 file given
    on the command line, then, per grid cell, estimates the inter-satellite
    offsets ERS1-ERS2 (offset_12) and ERS2-Envisat (offset_23) from the
    overlapping portions of their time series.  Optionally writes the two
    offset grids to FNAME_OUT and plots maps/histograms of the offsets.

    Relies on module-level configuration globals: VAR_TO_CALIBRATE, SUBSET,
    SAT_NAMES, DETREND, FILTER, PLOT_TS, SAT_BIAS, LINEAR_FIT,
    SAVE_TO_FILE, PLOT, FNAME_OUT, LON, LAT.
    """
    fname_in = sys.argv[1]
    din = GetData(fname_in, 'a')
    satname = din.satname
    time = change_day(din.time, 15)  # change all days (e.g. 14,15,16,17) to 15
    ts = getattr(din, VAR_TO_CALIBRATE)
    err = din.dh_error
    n_ad = din.n_ad
    n_da = din.n_da
    lon = din.lon
    lat = din.lat
    din.file.close()
    # NOTE(review): err, n_ad, n_da and t are read but never used below in
    # this function -- kept for parity with related scripts, confirm.
    t = ap.num2year(time)
    if SUBSET:
        # get subset (Amundsen sector); lon2/lat2 come back identical
        # from each call, so the last pair is kept
        ts, lon2, lat2 = ap.get_subset(ap.amundsen, ts, lon, lat)
        err, lon2, lat2 = ap.get_subset(ap.amundsen, err, lon, lat)
        n_ad, lon2, lat2 = ap.get_subset(ap.amundsen, n_ad, lon, lat)
        n_da, lon2, lat2 = ap.get_subset(ap.amundsen, n_da, lon, lat)
        lon, lat = lon2, lat2
    xx, yy = np.meshgrid(lon, lat)
    nt, ny, nx = ts.shape
    # per-cell offset grids; NaN where no data / no overlap
    offset_12 = np.full((ny, nx), np.nan)
    offset_23 = np.full((ny, nx), np.nan)
    print 'cross-calibrating time series:', VAR_TO_CALIBRATE
    isfirst = True
    if SAT_NAMES is None:
        # NOTE(review): din.file was closed above, and 'satnames' is never
        # used later ('satname' is) -- confirm this branch is dead.
        satnames = np.unique(din.satname)
    # iterate over every grid cell (all times)
    no_overlap_12 = 0
    no_overlap_23 = 0
    for i in xrange(ny):
        for j in xrange(nx):
            if 0:
                # (debug) jump to the cell nearest a fixed LON/LAT
                i, j = ap.find_nearest2(xx, yy, (LON, LAT))
                i -= 0
                j += 0
                print 'grid-cell:', i, j
            ts_ij = ts[:, i, j]
            if np.isnan(ts_ij).all():
                continue
            # get all time series (all sats) in one df (per grid-cell)
            var = create_df_with_sats(time, ts_ij, satname, SAT_NAMES)
            if DETREND:
                var = var.apply(detrend)
            if FILTER:
                var = var.apply(ap.hp_filt, lamb=7, nan=True)
            if PLOT_TS and (var.count().sum() > 10):
                print 'grid-cell:', i, j
                var.plot(linewidth=3, figsize=(9, 3), legend=False)
                plt.title('Elevation change, dh (lon=%.2f, lat=%.2f)' % (xx[i, j], yy[i, j]))
                plt.ylabel('m')
                plt.show()
            # compute offset (if ts overlap)
            #---------------------------------------------------
            x = pd.notnull(var)
            overlap_12 = x['ers1'] & x['ers2']
            overlap_23 = x['ers2'] & x['envi']
            if np.sometrue(overlap_12):
                if SAT_BIAS:
                    s1 = var['ers1'][overlap_12]
                    s2 = var['ers2'][overlap_12]
                    if LINEAR_FIT:
                        # using linear fit
                        # NOTE(review): s1 is reassigned before the mask for
                        # s2 is computed, so the two masks differ; the joint
                        # mask should be computed once -- confirm intent.
                        s1 = s1[s1.notnull() & s2.notnull()]
                        s2 = s2[s1.notnull() & s2.notnull()]
                        if len(s1) > 1 and len(s2) > 1:
                            # offset = difference of the fitted lines' total change
                            s1.index, s1[:] = ap.linear_fit(
                                ap.date2year(s1.index), s1.values)
                            s2.index, s2[:] = ap.linear_fit(
                                ap.date2year(s2.index), s2.values)
                            offset = (s1.values[-1] - s1.values[0]) - (
                                s2.values[-1] - s2.values[0])
                        else:
                            # NOTE(review): 'offset' keeps its value from a
                            # previous iteration here (NameError on the very
                            # first) and is still stored below -- confirm.
                            pass
                    else:
                        # using absolute values
                        s1 = ap.referenced(s1, to='first')
                        s2 = ap.referenced(s2, to='first')
                        s1[0], s2[0] = np.nan, np.nan  # remove first values
                        offset = np.nanmean(s1 - s2)
                        #pd.concat((s1, s2), axis=1).plot(marker='o')
                else:
                    offset = np.nanmean(var['ers1'] - var['ers2'])
                offset_12[i, j] = offset
            else:
                no_overlap_12 += 1
            if np.sometrue(overlap_23):
                if SAT_BIAS:
                    s2 = var['ers2'][overlap_23]
                    s3 = var['envi'][overlap_23]
                    if LINEAR_FIT:
                        # same sequential-filter caveat as the 12-branch above
                        s2 = s2[s2.notnull() & s3.notnull()]
                        s3 = s3[s2.notnull() & s3.notnull()]
                        if len(s2) > 1 and len(s3) > 1:
                            s2.index, s2[:] = ap.linear_fit(
                                ap.date2year(s2.index), s2.values)
                            s3.index, s3[:] = ap.linear_fit(
                                ap.date2year(s3.index), s3.values)
                            offset = (s2.values[-1] - s2.values[0]) - (
                                s3.values[-1] - s3.values[0])
                        else:
                            pass
                    else:
                        s2 = ap.referenced(s2, to='first')
                        s3 = ap.referenced(s3, to='first')
                        s2[0], s3[0] = np.nan, np.nan
                        offset = np.nanmean(s2 - s3)
                        #pd.concat((s2, s3), axis=1).plot(marker='o')
                        #plt.show()
                else:
                    offset = np.nanmean(var['ers2'] - var['envi'])
                offset_23[i, j] = offset
            else:
                no_overlap_23 += 1
            #---------------------------------------------------
    # grid-wide statistics of the per-cell offsets (NaN-aware)
    mean_offset_12 = np.nanmean(offset_12)
    median_offset_12 = np.nanmedian(offset_12)
    mean_offset_23 = np.nanmean(offset_23)
    median_offset_23 = np.nanmedian(offset_23)
    if SAVE_TO_FILE:
        fout = tb.open_file(FNAME_OUT, 'w')
        fout.create_array('/', 'lon', lon)
        fout.create_array('/', 'lat', lat)
        fout.create_array('/', 'offset_12', offset_12)
        fout.create_array('/', 'offset_23', offset_23)
        fout.close()
    if PLOT:
        # maps of the two offset fields (median-filtered for display only;
        # note this overwrites offset_12/offset_23 after they were saved)
        plt.figure()
        plt.subplot(211)
        offset_12 = ap.median_filt(offset_12, 3, 3)
        plt.imshow(offset_12, origin='lower', interpolation='nearest',
                   vmin=-.5, vmax=.5)
        plt.title('ERS1-ERS2')
        plt.colorbar(shrink=0.8)
        plt.subplot(212)
        offset_23 = ap.median_filt(offset_23, 3, 3)
        plt.imshow(offset_23, origin='lower', interpolation='nearest',
                   vmin=-.5, vmax=.5)
        plt.title('ERS2-Envisat')
        #plt.colorbar(shrink=0.3, orientation='h')
        plt.colorbar(shrink=0.8)
        # histograms of the per-cell offsets
        plt.figure()
        plt.subplot(121)
        o12 = offset_12[~np.isnan(offset_12)]
        plt.hist(o12, bins=100)
        plt.title('ERS1-ERS2')
        ax = plt.gca()
        ap.intitle('mean/median = %.2f/%.2f m' % (mean_offset_12,
                   median_offset_12), ax=ax, loc=2)
        plt.xlim(-1, 1)
        plt.subplot(122)
        o23 = offset_23[~np.isnan(offset_23)]
        plt.hist(o23, bins=100)
        plt.title('ERS2-Envisat')
        ax = plt.gca()
        ap.intitle('mean/median = %.2f/%.2f m' % (mean_offset_23,
                   median_offset_23), ax=ax, loc=2)
        plt.xlim(-1, 1)
        plt.show()
    print 'calibrated variable:', VAR_TO_CALIBRATE
    print 'no overlaps:', no_overlap_12, no_overlap_23
    print 'mean offset:', mean_offset_12, mean_offset_23
    print 'median offset:', median_offset_12, median_offset_23
    print 'out file ->', FNAME_OUT
# read firn print 'reading firn...' f1 = nc.Dataset(FILE_FIRN) d = f1.variables h_firn = d['zs'] #[:] t_firn = d['time'][:] # years lon_firn = d['lon'][:] # 2d lat_firn = d['lat'][:] # 2d # read altim print 'reading altim...' f2 = tb.openFile(FILE_ALTIM, 'a') h_altim = f2.root.dh_mean_mixed_const_xcal[:-1, ...] # minus last to agree with firn ts t_altim = ap.num2year(f2.root.time_xcal[:-1]) # years lat_altim = f2.root.lat[:] # 1d lon_altim = f2.root.lon[:] # 1d lon_altim = ap.lon_180_360(lon_altim, inverse=True) # get lon/lat only of complete (no gaps) altim ts h_altim = h_altim.sum(axis=0) # 3d -> 2d i_altim, j_altim = np.where(~np.isnan(h_altim)) # 2d indices lonlat_altim = np.column_stack((lon_altim[j_altim], lat_altim[i_altim])) # find nearest lon/lat in the firn model i_firn, j_firn = ap.find_nearest2(lon_firn, lat_firn, lonlat_altim) # find nearest altim times in the firn times k_firn, = ap.find_nearest(t_firn, t_altim)
def testSpline(x, y, s, npts):
    """Fit a univariate smoothing spline to (x, y), plot it in red, and
    print the sum-of-squares error of the spline against the reference
    signal s.  Returns the spline evaluated at x.

    NOTE(review): the smoothing factor is hard-coded (s=240) while the
    's' parameter holds the reference signal -- confusing name collision,
    confirm the hard-coded value is intentional.
    """
    sp = UnivariateSpline(x, y, s=240)
    plt.plot(x,sp(x),'r')
    print "splerr", ssqe(sp(x), s, npts)
    return sp(x)


def plotPowerSpectrum(y, w):
    """Plot the power spectrum of y against the frequencies w.

    NOTE(review): reads the module-level 'dt' (sample spacing) defined in
    the __main__ block below, not a parameter -- fragile, confirm.
    """
    ft = np.fft.rfft(y)
    # power = |FFT|^2 scaled by dt^2
    ps = np.real(ft*np.conj(ft))*np.square(dt)
    plt.plot(w, ps)


if __name__ == '__main__':
    # analyze the (interpolated) elevation-change series of one grid cell
    i, j = 1, 161
    f = tb.openFile(DIR + FNAME)
    x = ap.num2year(f.root.time_xcal[:])  # decimal years
    y = f.root.dh_mean_xcal_interp_short_const[:,i,j]
    sigma = np.var(y)
    s = y[:]  # reference signal = the series itself
    npts = len(x)
    end = x[-1]
    dt = end/float(npts)  # mean sample spacing (years)
    nyf = 0.5/dt  # Nyquist frequency
    # synthetic test signal, kept for reference (disabled)
    '''
    sigma = 0.5
    x = np.linspace(0,end,npts)
    r = np.random.normal(scale = sigma, size=(npts))
    s = np.sin(2*np.pi*x)#+np.sin(4*2*np.pi*x)
    y = s + r
    '''
lsm = d1['lsm'][:] #dt1 = ap.year2date(year1) dt1 = year1 f2 = tb.openFile(FILE2) #elev = f2.root.elev[:] elev = f2.root.dh_mean_all[:] time2 = f2.root.time_all[:] lon2 = f2.root.lon[:] # 1d lat2 = f2.root.lat[:] # 1d lon2 = ap.lon_180_360(lon2, inverse=True) lons2, lats2 = np.meshgrid(lon2, lat2) points2 = np.column_stack((lons2.ravel(), lats2.ravel())) #points2 = (lons2[6,4], lats2[6,4]) #dt2 = ap.num2date(time2) dt2 = ap.num2year(time2) f3 = tb.openFile(FILE3) d3 = f3.root.data[:] y3, x3 = d3[:,0], d3[:,1] x3 = ap.lon_180_360(x3, inverse=True) ''' f4 = tb.openFile(FILE4) d4 = f4.root.data[:] y4, x4 = d4[:,0], d4[:,1] x4 = ap.lon_180_360(x4, inverse=True) ''' # crop firn grid #i1, j1 = ap.find_nearest2(lons1, lats1, points2)
def write_slabs(fid, var_name, data):
    """Save 3d array into several slabs, for XDMF.

    Creates a '/data' group in the open PyTables file `fid` and writes one
    array node per leading-axis slice, named '<var_name>_00', '_01', ...
    """
    g = fid.create_group('/', 'data')
    for i, d in enumerate(data):
        fid.create_array(g, var_name +'_%02d' % i, d)


ap.rcparams()

# read data
print('loading data...')
fin = tb.open_file(FILE_IN)
data = fin.root.dh_mean_mixed_const_xcal[:]
error = fin.root.dh_error_xcal[:]
time = ap.num2year(fin.root.time_xcal[:])  # decimal years
lon = fin.root.lon[:]
lat = fin.root.lat[:]
xx, yy = np.meshgrid(lon, lat)
nz, ny, nx = data.shape
# previously computed inter-satellite offset grids
f1 = tb.open_file(FILE_OFFSET)
offset_12 = f1.root.offset_12[:]
offset_23 = f1.root.offset_23[:]
f1.close()
print('done')

if 0:
    # (for testing only) subset
    print lon.shape, lat.shape, data.shape
    i, j = ap.where_isnan('Totten', lon, lat)
    data[:,i,j] = np.nan
def main():
    """Cross-calibrate satellite altimetry time series (Python 2 script).

    Reads the variable named by VAR_TO_CALIBRATE from the HDF5 file given
    on the command line, then, per grid cell, estimates the inter-satellite
    offsets ERS1-ERS2 (offset_12) and ERS2-Envisat (offset_23) from the
    overlapping portions of their time series.  Optionally writes the two
    offset grids to FNAME_OUT and plots maps/histograms of the offsets.

    NOTE(review): near-duplicate of the earlier main() in this file.
    Relies on module-level configuration globals: VAR_TO_CALIBRATE, SUBSET,
    SAT_NAMES, DETREND, FILTER, PLOT_TS, SAT_BIAS, LINEAR_FIT,
    SAVE_TO_FILE, PLOT, FNAME_OUT, LON, LAT.
    """
    fname_in = sys.argv[1]
    din = GetData(fname_in, 'a')
    satname = din.satname
    time = change_day(din.time, 15)  # change all days (e.g. 14,15,16,17) to 15
    ts = getattr(din, VAR_TO_CALIBRATE)
    err = din.dh_error
    n_ad = din.n_ad
    n_da = din.n_da
    lon = din.lon
    lat = din.lat
    din.file.close()
    # NOTE(review): err, n_ad, n_da and t are read but never used below in
    # this function -- confirm.
    t = ap.num2year(time)
    if SUBSET:
        # get subset (Amundsen sector); lon2/lat2 come back identical
        # from each call, so the last pair is kept
        ts, lon2, lat2 = ap.get_subset(ap.amundsen, ts, lon, lat)
        err, lon2, lat2 = ap.get_subset(ap.amundsen, err, lon, lat)
        n_ad, lon2, lat2 = ap.get_subset(ap.amundsen, n_ad, lon, lat)
        n_da, lon2, lat2 = ap.get_subset(ap.amundsen, n_da, lon, lat)
        lon, lat = lon2, lat2
    xx, yy = np.meshgrid(lon, lat)
    nt, ny, nx = ts.shape
    # per-cell offset grids; NaN where no data / no overlap
    offset_12 = np.full((ny,nx), np.nan)
    offset_23 = np.full((ny,nx), np.nan)
    print 'cross-calibrating time series:', VAR_TO_CALIBRATE
    isfirst = True
    if SAT_NAMES is None:
        # NOTE(review): din.file was closed above, and 'satnames' is never
        # used later ('satname' is) -- confirm this branch is dead.
        satnames = np.unique(din.satname)
    # iterate over every grid cell (all times)
    no_overlap_12 = 0
    no_overlap_23 = 0
    for i in xrange(ny):
        for j in xrange(nx):
            if 0:
                # (debug) jump to the cell nearest a fixed LON/LAT
                i, j = ap.find_nearest2(xx, yy, (LON,LAT))
                i -= 0
                j += 0
                print 'grid-cell:', i, j
            ts_ij = ts[:,i,j]
            if np.isnan(ts_ij).all():
                continue
            # get all time series (all sats) in one df (per grid-cell)
            var = create_df_with_sats(time, ts_ij, satname, SAT_NAMES)
            if DETREND:
                var = var.apply(detrend)
            if FILTER:
                var = var.apply(ap.hp_filt, lamb=7, nan=True)
            if PLOT_TS and (var.count().sum() > 10):
                print 'grid-cell:', i, j
                var.plot(linewidth=3, figsize=(9, 3), legend=False)
                plt.title('Elevation change, dh (lon=%.2f, lat=%.2f)' % (xx[i,j], yy[i,j]))
                plt.ylabel('m')
                plt.show()
            # compute offset (if ts overlap)
            #---------------------------------------------------
            x = pd.notnull(var)
            overlap_12 = x['ers1'] & x['ers2']
            overlap_23 = x['ers2'] & x['envi']
            if np.sometrue(overlap_12):
                if SAT_BIAS:
                    s1 = var['ers1'][overlap_12]
                    s2 = var['ers2'][overlap_12]
                    if LINEAR_FIT:
                        # using linear fit
                        # NOTE(review): s1 is reassigned before the mask for
                        # s2 is computed, so the two masks differ; the joint
                        # mask should be computed once -- confirm intent.
                        s1 = s1[s1.notnull() & s2.notnull()]
                        s2 = s2[s1.notnull() & s2.notnull()]
                        if len(s1) > 1 and len(s2) > 1:
                            # offset = difference of the fitted lines' total change
                            s1.index, s1[:] = ap.linear_fit(ap.date2year(s1.index), s1.values)
                            s2.index, s2[:] = ap.linear_fit(ap.date2year(s2.index), s2.values)
                            offset = (s1.values[-1] - s1.values[0]) - (s2.values[-1] - s2.values[0])
                        else:
                            # NOTE(review): 'offset' keeps its value from a
                            # previous iteration here (NameError on the very
                            # first) and is still stored below -- confirm.
                            pass
                    else:
                        # using absolute values
                        s1 = ap.referenced(s1, to='first')
                        s2 = ap.referenced(s2, to='first')
                        s1[0], s2[0] = np.nan, np.nan  # remove first values
                        offset = np.nanmean(s1 - s2)
                        #pd.concat((s1, s2), axis=1).plot(marker='o')
                else:
                    offset = np.nanmean(var['ers1'] - var['ers2'])
                offset_12[i,j] = offset
            else:
                no_overlap_12 += 1
            if np.sometrue(overlap_23):
                if SAT_BIAS:
                    s2 = var['ers2'][overlap_23]
                    s3 = var['envi'][overlap_23]
                    if LINEAR_FIT:
                        # same sequential-filter caveat as the 12-branch above
                        s2 = s2[s2.notnull() & s3.notnull()]
                        s3 = s3[s2.notnull() & s3.notnull()]
                        if len(s2) > 1 and len(s3) > 1:
                            s2.index, s2[:] = ap.linear_fit(ap.date2year(s2.index), s2.values)
                            s3.index, s3[:] = ap.linear_fit(ap.date2year(s3.index), s3.values)
                            offset = (s2.values[-1] - s2.values[0]) - (s3.values[-1] - s3.values[0])
                        else:
                            pass
                    else:
                        s2 = ap.referenced(s2, to='first')
                        s3 = ap.referenced(s3, to='first')
                        s2[0], s3[0] = np.nan, np.nan
                        offset = np.nanmean(s2 - s3)
                        #pd.concat((s2, s3), axis=1).plot(marker='o')
                        #plt.show()
                else:
                    offset = np.nanmean(var['ers2'] - var['envi'])
                offset_23[i,j] = offset
            else:
                no_overlap_23 += 1
            #---------------------------------------------------
    # grid-wide statistics of the per-cell offsets (NaN-aware)
    mean_offset_12 = np.nanmean(offset_12)
    median_offset_12 = np.nanmedian(offset_12)
    mean_offset_23 = np.nanmean(offset_23)
    median_offset_23 = np.nanmedian(offset_23)
    if SAVE_TO_FILE:
        fout = tb.open_file(FNAME_OUT, 'w')
        fout.create_array('/', 'lon', lon)
        fout.create_array('/', 'lat', lat)
        fout.create_array('/', 'offset_12', offset_12)
        fout.create_array('/', 'offset_23', offset_23)
        fout.close()
    if PLOT:
        # maps of the two offset fields (median-filtered for display only;
        # note this overwrites offset_12/offset_23 after they were saved)
        plt.figure()
        plt.subplot(211)
        offset_12 = ap.median_filt(offset_12, 3, 3)
        plt.imshow(offset_12, origin='lower', interpolation='nearest',
                   vmin=-.5, vmax=.5)
        plt.title('ERS1-ERS2')
        plt.colorbar(shrink=0.8)
        plt.subplot(212)
        offset_23 = ap.median_filt(offset_23, 3, 3)
        plt.imshow(offset_23, origin='lower', interpolation='nearest',
                   vmin=-.5, vmax=.5)
        plt.title('ERS2-Envisat')
        #plt.colorbar(shrink=0.3, orientation='h')
        plt.colorbar(shrink=0.8)
        # histograms of the per-cell offsets
        plt.figure()
        plt.subplot(121)
        o12 = offset_12[~np.isnan(offset_12)]
        plt.hist(o12, bins=100)
        plt.title('ERS1-ERS2')
        ax = plt.gca()
        ap.intitle('mean/median = %.2f/%.2f m' % (mean_offset_12,
                   median_offset_12), ax=ax, loc=2)
        plt.xlim(-1, 1)
        plt.subplot(122)
        o23 = offset_23[~np.isnan(offset_23)]
        plt.hist(o23, bins=100)
        plt.title('ERS2-Envisat')
        ax = plt.gca()
        ap.intitle('mean/median = %.2f/%.2f m' % (mean_offset_23,
                   median_offset_23), ax=ax, loc=2)
        plt.xlim(-1, 1)
        plt.show()
    print 'calibrated variable:', VAR_TO_CALIBRATE
    print 'no overlaps:', no_overlap_12, no_overlap_23
    print 'mean offset:', mean_offset_12, mean_offset_23
    print 'median offset:', median_offset_12, median_offset_23
    print 'out file ->', FNAME_OUT