# imports needed by the functions below; project helpers (lon_180_360,
# get_data, digitize, OutGrids, etc.) come from the author's own modules
import re
from glob import glob

import numpy as np
import matplotlib.pyplot as plt
from scipy.ndimage import gaussian_filter


def get_subset(region, arr, x, y):
    """Get subset from a 2d/3d-array.

    Parameters
    ----------
    region : (left, right, bottom, top)
    arr : 2d/3d array to subset
    x, y : 1d arrays with coordinates of arr

    Returns
    -------
    arr_sub, x_sub, y_sub : a subset of 'arr'
    """
    arr2, x2, y2 = arr.copy(), x.copy(), y.copy()  # important!
    l, r, b, t = region
    x2 = lon_180_360(x2, region)
    j, = np.where((l <= x2) & (x2 <= r))
    i, = np.where((b <= y2) & (y2 <= t))
    if len(j) == 0 or len(i) == 0:
        raise ValueError('region given does not agree with coordinates!')
    j1, j2 = j.min(), j.max() + 1  # +1 so slices include the last
    i1, i2 = i.min(), i.max() + 1  # index inside the region
    return arr2[..., i1:i2, j1:j2], x2[j1:j2], y2[i1:i2]
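# Usage sketch for get_subset (illustrative values only, not from the
# original code; assumes a 3d array ordered (time, y, x) and that
# 'lon_180_360' from this module handles the given region):
#
#   arr = np.random.rand(12, 90, 180)      # (time, lat, lon)
#   x = np.linspace(0.0, 358.0, 180)       # 1d lon coords
#   y = np.linspace(-90.0, -45.0, 90)      # 1d lat coords
#   sub, x_sub, y_sub = get_subset((160, 200, -80, -60), arr, x, y)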
def main(args):
    # input args
    #---------------------------------------------------------------------
    if len(args.files) > 1:
        files = args.files
    else:
        files = glob(args.files[0])  # using 'glob'
    fname_out = args.fname_out
    region = args.region
    x_range = region[:2]
    y_range = region[2:]
    dx = args.delta[0]
    dy = args.delta[1]
    nsect = args.nsectors
    suffix = args.suffix

    if nsect > 1:
        subdom = get_sub_domains(x_range[0], x_range[1], nsect)

    if len(files) > 1:
        files.sort(key=lambda s: re.findall(r'\d\d\d\d\d\d+', s))

    print 'processing %d files...' % len(files)

    for fname in files:

        if nsect > 1:
            sectnum = get_sect_num(fname)
            x_range = subdom[sectnum]

        d = get_data(fname)
        d['satname'], d['time1'], d['time2'] = get_info(fname)

        # pre-processing
        #-----------------------------------------------------------------

        d['lon'] = alt.lon_180_360(d['lon'], region=region)

        # filter data -> select modes and "good" data points
        d = filter_data(d, ice_only=ICE_ONLY)

        # go to next file
        if d is None:
            print 'No data left after filtering!\nFile:', fname
            continue

        # apply tide and load corrections
        """
        The sign of the correction is the same (subtract), but the phase
        of the load tide is ~180 degrees off the ocean tide. E.g., if the
        ocean tide at (t,x,y) is +1.0 m, the load tide is probably -0.03 m
        (more or less), so the correction equation would be:

            tide_free = measured - (+1.0) - (-0.03) = measured - (+0.97)
        """
        d['h1'] -= d['tide1']
        d['h2'] -= d['tide2']

        # digitize lons and lats (create the grid)
        lon, lat, j_bins, i_bins, x_edges, y_edges, nx, ny = \
            digitize(d['lon'], d['lat'], x_range, y_range, dx, dy)

        h1, h2 = d['h1'], d['h2']
        g1, g2 = d['g1'], d['g2']
        ftrk1, ftrk2 = d['ftrk1'], d['ftrk2']

        # output grids
        g = OutGrids(ny, nx)

        # calculations per grid cell
        #-----------------------------------------------------------------
        for i in xrange(ny):
            for j in xrange(nx):
                # indices of data corresponding to the 'i,j' grid cell
                # (bin indices are 1-based, hence the '+ 1')
                i_cell, = np.where((j_bins == j + 1) & (i_bins == i + 1))
                if len(i_cell) == 0:
                    continue

                # xovers per cell
                dh = h2[i_cell] - h1[i_cell]  # always t2 - t1!
                dg = g2[i_cell] - g1[i_cell]

                # separate into asc/des and des/asc crossovers
                i_ad, i_da = where_ad_da(ftrk1[i_cell], ftrk2[i_cell])
                dh_ad = dh[i_ad]
                dh_da = dh[i_da]
                dg_ad = dg[i_ad]
                dg_da = dg[i_da]

                # filter absolute values
                i_ad = abs_editing(dh_ad, absval=ABS_VAL, return_index=True)
                i_da = abs_editing(dh_da, absval=ABS_VAL, return_index=True)
                dh_ad = dh_ad[i_ad]
                dh_da = dh_da[i_da]
                dg_ad = dg_ad[i_ad]
                dg_da = dg_da[i_da]

                # filter standard deviation
                i_ad = std_editing(dh_ad, nsd=NUM_STD, iterative=ITERATIVE,
                                   return_index=True)
                i_da = std_editing(dh_da, nsd=NUM_STD, iterative=ITERATIVE,
                                   return_index=True)
                if len(i_ad) > 0 or len(i_da) > 0:
                    dh_ad = dh_ad[i_ad]
                    dh_da = dh_da[i_da]
                    dg_ad = dg_ad[i_ad]
                    dg_da = dg_da[i_da]

                # mean values
                g.dh_mean[i, j] = compute_weighted_mean(dh_ad, dh_da,
                                                        useall=USEALL,
                                                        median=MEDIAN)
                g.dh_error[i, j] = compute_weighted_error(dh_ad, dh_da,
                                                          useall=USEALL)
                g.dh_error2[i, j] = compute_wingham_error(dh_ad, dh_da,
                                                          useall=USEALL)
                g.dg_mean[i, j] = compute_weighted_mean(dg_ad, dg_da,
                                                        useall=USEALL,
                                                        median=MEDIAN)
                g.dg_error[i, j] = compute_weighted_error(dg_ad, dg_da,
                                                          useall=USEALL)
                g.dg_error2[i, j] = compute_wingham_error(dg_ad, dg_da,
                                                          useall=USEALL)
                g.n_ad[i, j], g.n_da[i, j] = compute_num_obs(dh_ad, dh_da,
                                                             useall=USEALL)

        # gaussian smooth
        if PLOT and GAUSS_SMOOTH and not SAVE_TO_FILE:
            g.dh_mean = gaussian_filter(g.dh_mean, GAUSS_WIDTH)
            g.dg_mean = gaussian_filter(g.dg_mean, GAUSS_WIDTH)

        # save the grids
        #-----------------------------------------------------------------
        if SAVE_TO_FILE:
            # save one set of grids per iteration (i.e., per file)
            fname_out = fname.replace('.h5', suffix)
            out = OutputContainers(fname_out, (1, ny, nx))
            out.lon[:] = lon
            out.lat[:] = lat
            out.x_edges[:] = x_edges
            out.y_edges[:] = y_edges
            out.time1[:] = d['time1']
            out.time2[:] = d['time2']
            out.dh_mean[:] = g.dh_mean
            out.dh_error[:] = g.dh_error
            out.dh_error2[:] = g.dh_error2
            out.dg_mean[:] = g.dg_mean
            out.dg_error[:] = g.dg_error
            out.dg_error2[:] = g.dg_error2
            out.n_ad[:] = g.n_ad
            out.n_da[:] = g.n_da
            out.file.flush()
            out.file.close()

        try:
            print_info(x_edges, y_edges, lon, lat, dx, dy, 1,
                       g.n_ad, g.n_da, source='None')
        except Exception:
            pass

        if PLOT:
            try:
                plt.plot(d['lon'], d['lat'], '.')
                plot_grids(x_edges, y_edges, g.dh_mean, g.dh_error,
                           g.n_ad, g.n_da)
                plt.show()
            except Exception:
                print 'no data to plot!'

        if SAVE_TO_FILE:
            print 'file out -->', fname_out, '\n'
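# Note on the 'j_bins == j + 1' test in main(): if digitize() wraps
# np.digitize, the returned bin indices are 1-based (0 means left of the
# first edge), so grid cell j corresponds to bin index j + 1. A minimal
# sketch of that convention (assumption: digitize() follows np.digitize
# semantics):
#
#   edges = np.array([0., 1., 2., 3.])
#   vals = np.array([0.5, 1.5, 2.5])
#   np.digitize(vals, edges)  # -> array([1, 2, 3])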
import numpy as np
import netCDF4 as nc
import tables as tb
# 'ap' is the project's helper module (imported elsewhere,
# e.g. 'import altimpy as ap')

# read firn
print 'reading firn...'
f1 = nc.Dataset(FILE_FIRN)
d = f1.variables
h_firn = d['zs']        #[:]
t_firn = d['time'][:]   # years
lon_firn = d['lon'][:]  # 2d
lat_firn = d['lat'][:]  # 2d

# read altim
print 'reading altim...'
f2 = tb.openFile(FILE_ALTIM, 'a')
h_altim = f2.root.dh_mean_mixed_const_xcal[:-1, ...]  # minus last to agree with firn ts
t_altim = ap.num2year(f2.root.time_xcal[:-1])         # years
lat_altim = f2.root.lat[:]  # 1d
lon_altim = f2.root.lon[:]  # 1d
lon_altim = ap.lon_180_360(lon_altim, inverse=True)

# get lon/lat only of complete (no gaps) altim ts
h_altim = h_altim.sum(axis=0)                    # 3d -> 2d (NaN if any gap)
i_altim, j_altim = np.where(~np.isnan(h_altim))  # 2d indices
lonlat_altim = np.column_stack((lon_altim[j_altim], lat_altim[i_altim]))

# find nearest lon/lat in the firn model
i_firn, j_firn = ap.find_nearest2(lon_firn, lat_firn, lonlat_altim)

# find nearest altim times in the firn times
k_firn, = ap.find_nearest(t_firn, t_altim)

# new firn grid => same altim resolution with original firn time
nt, ny, nx = h_firn.shape[0], h_altim.shape[0], h_altim.shape[1]
h_firn_new = np.full((nt, ny, nx), np.nan, dtype='f8')
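# A minimal sketch (not the original code) of how the regridded firn array
# could be filled using the nearest-neighbour indices found above. Assumes
# i_firn/j_firn come back in (row, col) order and align 1-to-1 with the
# altim points in i_altim/j_altim, and that h_firn is indexed (time, y, x):
#
#   for n in xrange(len(i_altim)):
#       h_firn_new[:, i_altim[n], j_altim[n]] = h_firn[:, i_firn[n], j_firn[n]]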
if 1:  # load mask
    print 'loading mask...'
    f = tb.open_file(FILE_MSK, 'r')
    x_msk = f.root.x[:]
    y_msk = f.root.y[:]
    msk = f.root.mask[:]
    f.close()
    print 'done'

if 1:  # 2d -> 1d, x/y -> lon/lat
    x_msk, y_msk = np.meshgrid(x_msk, y_msk)     # 1d -> 2d
    x_msk, y_msk = x_msk.ravel(), y_msk.ravel()  # 2d -> 1d
    msk = msk.ravel()
    lon_msk, lat_msk = ap.xy2ll(x_msk, y_msk, units='m')
    lon_msk = ap.lon_180_360(lon_msk)
    del x_msk, y_msk

lon_nodes, lat_nodes = ap.cell2node(lon, lat)
area = np.full(data[0].shape, np.nan)
cells_idx = []

# count number of mask cells falling into each data cell
for i in xrange(len(lat)):
    for j in xrange(len(lon)):
        i_cells, = np.where((lon_nodes[j] <= lon_msk) &
                            (lon_msk <= lon_nodes[j+1]) &
                            (lat_nodes[i] <= lat_msk) &
                            (lat_msk <= lat_nodes[i+1]) &
                            (msk == 4))  # 4 = ice shelf
        area[i, j] = len(i_cells)  # each mask cell is 1 km**2
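# Equivalent vectorized count (a sketch, not the original code): a single
# np.histogram2d call over the same node edges gives the per-cell counts,
# assuming lon_nodes/lat_nodes are monotonically increasing. Note that
# histogram2d bins are half-open (except the last), whereas the loop above
# tests both edges inclusively, so points exactly on shared edges may be
# counted differently:
#
#   shelf = (msk == 4)
#   counts, _, _ = np.histogram2d(lat_msk[shelf], lon_msk[shelf],
#                                 bins=(lat_nodes, lon_nodes))
#   area_alt = counts  # each mask cell is 1 km**2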
d1 = f1.variables
firn = d1["zs"]
year1 = d1["time"][:]
lons1 = d1["lon"][:]  # 2d
lats1 = d1["lat"][:]  # 2d
ism = d1["ism"][:]
lsm = d1["lsm"][:]
dt1 = ap.year2date(year1)
# dt1 = year1

f2 = tb.openFile(FILE2)
elev = f2.root.elev[:]
time2 = f2.root.time[:]
lon2 = f2.root.lon[:]  # 1d
lat2 = f2.root.lat[:]  # 1d
lon2 = ap.lon_180_360(lon2, inverse=True)
lons2, lats2 = np.meshgrid(lon2, lat2)
points2 = np.column_stack((lons2.ravel(), lats2.ravel()))
# points2 = (lons2[6,4], lats2[6,4])
dt2 = ap.num2date(time2)
# dt2 = ap.num2year(time2)

"""
f3 = tb.openFile(FILE3)
d3 = f3.root.data[:]
y3, x3 = d3[:,0], d3[:,1]
x3 = ap.lon_180_360(x3, inverse=True)

f4 = tb.openFile(FILE4)
d4 = f4.root.data[:]
y4, x4 = d4[:,0], d4[:,1]