# --- put the firn model onto the altimetry grid ----------------------
# (this chunk appeared twice in the mangled source -- once truncated --
# and is emitted here exactly once)

# Altimetry series: drop the last time step so its length agrees with
# the firn time series, and convert numeric dates to decimal years.
h_altim = f2.root.dh_mean_mixed_const_xcal[:-1, ...]  # minus last to agree with firn ts
t_altim = ap.num2year(f2.root.time_xcal[:-1])         # years
lat_altim = f2.root.lat[:]                            # 1d
lon_altim = f2.root.lon[:]                            # 1d
# NOTE(review): inverse=True presumably maps 0/360 -> -180/180 -- confirm in ap
lon_altim = ap.lon_180_360(lon_altim, inverse=True)

# get lon/lat only of complete (no gaps) altim ts: summing over time
# propagates NaNs, so only cells valid at *every* epoch survive ~isnan
h_altim = h_altim.sum(axis=0)                    # 3d -> 2d
i_altim, j_altim = np.where(~np.isnan(h_altim))  # 2d indices of complete cells
lonlat_altim = np.column_stack((lon_altim[j_altim], lat_altim[i_altim]))

# find nearest lon/lat in the firn model
i_firn, j_firn = ap.find_nearest2(lon_firn, lat_firn, lonlat_altim)

# find nearest altim times in the firn times
k_firn, = ap.find_nearest(t_firn, t_altim)

# new firn grid => same altim spatial resolution, with original firn time steps
nt, ny, nx = h_firn.shape[0], h_altim.shape[0], h_altim.shape[1]
h_firn_new = np.full((nt, ny, nx), np.nan, dtype='f8')

# space interpolation (out-of-core): copy nearest-neighbour firn cells
# into the altim-resolution grid without fancy-indexing all of h_firn
#h_firn_new[:, i_altim, j_altim] = h_firn[:, i_firn, j_firn]
h_firn_new = array_to_array(h_firn_new, h_firn, i_altim, j_altim, i_firn, j_firn)
del h_firn  # release the large original firn array

# 3-month average, and time interpolation
print('smoothing firn...')
h_firn_new = as_frame(h_firn_new, t_firn, lat_altim, lon_altim)
# NOTE(review): pd.rolling_mean was removed in pandas 0.18+; the modern
# equivalent is h_firn_new.rolling(45, center=True).mean()
h_firn_smooth = pd.rolling_mean(h_firn_new, 45, center=True)
# spherical -> cartesian: grid-cell centers -> node (edge) coordinates,
# then stacked into an (n, 3) array of cartesian points
xed, yed = ap.cell2node(lon, lat)
xed2d, yed2d = np.meshgrid(xed, yed)
xyz = np.column_stack(ap.sph2xyz(xed2d.ravel(), yed2d.ravel()))

# reference every grid-cell time series to its own mean
data = as_frame(data, dt, lat, lon)
data = data.apply(ap.referenced, to='mean', raw=True)  # to mean !!!
data = as_array(data)
error[np.isnan(data)] = np.nan  # keep the error grid's gaps in sync with data

#---------------------------------------------------------------------

if PLOT:
    # plot the time series of the grid cell nearest to (LON, LAT)
    # NOTE(review): data is already an array at this point, so this call is
    # presumably a no-op -- verify as_array before removing it
    data = as_array(data)
    j, = ap.find_nearest(lon, LON)
    i, = ap.find_nearest(lat, LAT)
    i, j = i[0], j[0]
    y = data[:, i, j]
    plt.plot(time, y, linewidth=2)
    plt.show()
    # report how many valid (non-NaN) points the plotted series has
    # (print normalized to the call form used below; identical output on Py2)
    print(len(y[~np.isnan(y)]))
    print('percentage of data points: %.1f'
          % ((len(y[~np.isnan(y)]) / float(len(y))) * 100))

# save data
#---------------------------------------------------------------------

if SAVE:
    print('saving data...')
    fout = tb.open_file(FILE_OUT, 'w')