def main():
    """Apply a single filter (range/absrange/perc/med/gauss/highpass/sigma/mad/dz)
    to band 1 of an input raster and write the filtered result as a new GeoTiff.

    Input filename, filter name, and filter parameters come from the argparse
    namespace returned by getparser() (defined elsewhere in this file).
    """
    parser = getparser()
    args = parser.parse_args()
    fn = args.fn
    if not iolib.fn_check(fn):
        sys.exit("Unable to locate input file: %s" % fn)
    #Need some checks on these
    param = args.param
    print("Loading input raster into masked array")
    ds = iolib.fn_getds(fn)
    #Currently supports only single band operations
    r = iolib.ds_getma(ds, 1)
    #May need to cast input ma as float32 so np.nan filling works
    #r = r.astype(np.float32)
    #Want function that checks and returns float32 if necessary
    #Should filter, then return original dtype
    r_fltr = r
    #Loop through all specified input filters
    #for filt in args.filt:
    # NOTE(review): only the first filter is applied; the loop above is disabled
    filt = args.filt[0]
    # Collapse single-element parameter list to a scalar for the int(param) branches
    if len(param) == 1:
        param = param[0]
    # Suffix appended to the output filename describing the filter parameters
    param_str = ''
    if filt == 'range':
        #Range filter
        param = [float(i) for i in param[1:]]
        r_fltr = filtlib.range_fltr(r_fltr, param)
        param_str = '_{0:0.2f}-{1:0.2f}'.format(*param)
    elif filt == 'absrange':
        #Range filter of absolute values
        param = [float(i) for i in param[1:]]
        r_fltr = filtlib.absrange_fltr(r_fltr, param)
        param_str = '_{0:0.2f}-{1:0.2f}'.format(*param)
    elif filt == 'perc':
        #Percentile filter
        param = [float(i) for i in param[1:]]
        # NOTE(review): filters r rather than r_fltr; equivalent here because only
        # one filter is applied (r_fltr is still r), but inconsistent with other branches
        r_fltr = filtlib.perc_fltr(r, perc=param)
        param_str = '_{0:0.2f}-{1:0.2f}'.format(*param)
    elif filt == 'med':
        #Median filter
        param = int(param)
        r_fltr = filtlib.rolling_fltr(r_fltr, f=np.nanmedian, size=param)
        #r_fltr = filtlib.median_fltr(r_fltr, fsize=param, origmask=True)
        #r_fltr = filtlib.median_fltr_skimage(r_fltr, radius=4, origmask=True)
        param_str = '_%ipx' % param
    elif filt == 'gauss':
        #Gaussian filter (default)
        param = int(param)
        r_fltr = filtlib.gauss_fltr_astropy(r_fltr, size=param, origmask=False, fill_interior=False)
        param_str = '_%ipx' % param
    elif filt == 'highpass':
        #High pass filter
        param = int(param)
        r_fltr = filtlib.highpass(r_fltr, size=param)
        param_str = '_%ipx' % param
    elif filt == 'sigma':
        #n*sigma filter, remove outliers
        param = int(param)
        r_fltr = filtlib.sigma_fltr(r_fltr, n=param)
        param_str = '_n%i' % param
    elif filt == 'mad':
        #n*mad filter, remove outliers
        #Maybe better to use a percentile filter
        param = int(param)
        r_fltr = filtlib.mad_fltr(r_fltr, n=param)
        param_str = '_n%i' % param
    elif filt == 'dz':
        #Difference filter, need to specify ref_fn and range
        #Could let the user compute their own dz, then just run a standard range or absrange filter
        # First parameter is the reference raster filename; remainder is the valid dz range
        ref_fn = param[0]
        # Warp reference raster to match the input's resolution, extent, and projection
        ref_ds = warplib.memwarp_multi_fn([ref_fn,], res=ds, extent=ds, t_srs=ds)[0]
        ref = iolib.ds_getma(ref_ds)
        param = [float(i) for i in param[1:]]
        r_fltr = filtlib.dz_fltr_ma(r, ref, rangelim=param)
        #param_str = '_{0:0.2f}-{1:0.2f}'.format(*param)
        param_str = '_{0:0.0f}_{1:0.0f}'.format(*param)
    else:
        sys.exit("No filter type specified")
    #Compute and print stats before/after
    if args.stats:
        print("Input stats:")
        malib.print_stats(r)
        print("Filtered stats:")
        malib.print_stats(r_fltr)
    #Write out
    dst_fn = os.path.splitext(fn)[0] + '_%sfilt%s.tif' % (filt, param_str)
    if args.outdir is not None:
        outdir = args.outdir
        if not os.path.exists(outdir):
            os.makedirs(outdir)
        dst_fn = os.path.join(outdir, os.path.split(dst_fn)[-1])
    print("Writing out filtered raster: %s" % dst_fn)
    iolib.writeGTiff(r_fltr, dst_fn, ds)
def sample_stack(ex, ey, geoid_offset=False, pad=3):
    """Sample the global stack array `m` at pixel coordinates (ex, ey).

    Fix: converted Python 2 `print` statements to Python 3 `print()` calls;
    the rest of the file (e.g. main()) already uses Python 3 print syntax,
    and the statement form is a SyntaxError under Python 3.

    Parameters
    ----------
    ex, ey : pixel (column, row) coordinates into the stack
    geoid_offset : if True, shift sampled elevations by the local geoid offset
        (NPS -> geoid via geolib.nps2geoid)
    pad : half-width of the sampling window; pad=0 samples the single pixel,
        otherwise the median over a (2*pad+1)-ish clipped window is used

    Returns
    -------
    v : masked 1D array of per-timestep values, or None if (ex, ey) is
        outside the stack extent

    Relies on module-level globals: m (3D masked stack), gt (geotransform),
    d (ordinal dates), filter_outliers (bool) — assumed defined elsewhere.
    """
    if ex > m.shape[2]-1 or ey > m.shape[1]-1:
        print("Input coordinates are outside stack extent:")
        print(ex, ey)
        print(m.shape)
        v = None
    else:
        print("Sampling with pad: %i" % pad)
        if pad == 0:
            v = m[:, ey, ex]
        else:
            # Clip the sampling window to the stack bounds
            window_x = np.around(np.clip([ex-pad, ex+pad+1], 0, m.shape[2]-1)).astype(int)
            window_y = np.around(np.clip([ey-pad, ey+pad+1], 0, m.shape[1]-1)).astype(int)
            print(window_x)
            print(window_y)
            v = m[:, window_y[0]:window_y[1], window_x[0]:window_x[1]].reshape(m.shape[0], np.ptp(window_x)*np.ptp(window_y))
            #v = v.mean(axis=1)
            v = np.ma.median(v, axis=1)
        if v.count() == 0:
            print("No valid values")
        else:
            mx, my = geolib.pixelToMap(ex, ey, gt)
            print(ex, ey, mx, my)
            print("Count: %i" % v.count())
            #Hack to get elevations relative to geoid
            #Note: this can be added multiple times if clicked quickly
            if geoid_offset:
                #geoid_offset = geolib.sps2geoid(mx, my, 0.0)[2]
                geoid_offset = geolib.nps2geoid(mx, my, 0.0)[2]
                print("Removing geoid offset: %0.1f" % geoid_offset)
                v += geoid_offset
        #Should filter here
        #RS1 has some values that are many 1000s of m/yr below neighbors
        if filter_outliers:
            if True:
                # Absolute filter: reject values more than 4*MAD below the median
                med = malib.fast_median(v)
                mad = malib.mad(v)
                min_v = med - mad*4
                f_idx = (v < min_v).filled(False)
                if np.any(f_idx):
                    print(med, mad)
                    print("Outliers removed by absolute filter: (val < %0.1f)" % min_v)
                    print(timelib.o2dt(d[f_idx]))
                    print(v[f_idx])
                    v[f_idx] = np.ma.masked
            if True:
                v_idx = (~np.ma.getmaskarray(v)).nonzero()[0]
                #This tries to maintain fixed window in time
                f = filtlib.rolling_fltr(v, size=7)
                #This uses fixed number of neighbors
                # NOTE(review): the previous rolling_fltr result is intentionally
                # overwritten — the unmasked-samples variant below is the one used
                f = filtlib.rolling_fltr(v[v_idx], size=7)
                #f_diff = np.abs(f - v)
                #Note: the issue is usually that the velocity values are too low
                #f_diff = f - v
                f_diff = f - v[v_idx]
                diff_thresh = 2000
                #f_idx = (f_diff > diff_thresh).filled(False)
                #f_idx = (f_diff < diff_thresh).filled(False)
                # Map the filtered-sample flags back onto the full-length array
                f_idx = np.zeros_like(v.data).astype(bool)
                f_idx[v_idx] = (f_diff > diff_thresh)
                if np.any(f_idx):
                    print("Outliers removed by rolling median filter: (val < %0.1f)" % diff_thresh)
                    print(timelib.o2dt(d[f_idx]))
                    print(v[f_idx])
                    v[f_idx] = np.ma.masked
    return v
# --- Snow-water-equivalent (SWE) computation fragment ---
# Convert a snow-depth difference map (dz, defined earlier in the script)
# to SWE using a snow density, optionally filter, and set up plot limits.
if args.density is None:
    #Attempt to extract from nearby SNOTEL sites for dem_ts
    #Attempt to use model
    #Last resort, use constant value
    # Constant density fallback (g/cm^3); alternatives preserved below
    rho_s = 0.5
    #rho_s = 0.4
    #rho_s = 0.36

#Convert snow depth to swe
swe = dz * rho_s

if args.filter:
    print("Filtering SWE map")
    #Median filter to remove artifacts
    swe_f = filtlib.rolling_fltr(swe, size=5)
    #Gaussian filter to smooth over gaps
    # NOTE(review): this call filters the unfiltered `swe`, not `swe_f`, so the
    # rolling median result above is discarded — confirm whether chaining
    # (gauss_fltr_astropy(swe_f, ...)) was intended
    swe_f = filtlib.gauss_fltr_astropy(swe, size=9)
    swe = swe_f

# Color limits for plotting: 1-99 percentile with floor at 0...
swe_clim = list(malib.calcperc(swe, (1,99)))
swe_clim[0] = 0
# NOTE(review): ...but immediately overridden with a hardcoded range, making
# the two lines above dead code — presumably a deliberate manual override
swe_clim = (0, 8)

prism = None
# Number of subplot axes and figure size for the output figure
nax = 2
figsize = (8, 4)
if args.prism:
    #This is PRISM 30-year normal winter PRECIP
    # NOTE(review): hardcoded absolute path to a user's home directory —
    # will only exist on the original author's machine
    prism_fn = '/Users/dshean/data/PRISM_ppt_30yr_normal_800mM2_10-05_winter_cum.tif'
    if os.path.exists(prism_fn):
print("Input pixel count: %s" % trend.count()) #Outlier filter #This can remove valid pixels for larger glaciers (e.g. Baltoro) with negative dh/dt - could scale based on pixel count #trend_filt = filtlib.mad_fltr(trend, n=4) #trend_filt = filtlib.sigma_fltr(trend, n=3) #print("Output pixel count: %s" % trend_filt.count()) #Remove islands #trend_filt = filtlib.remove_islands(trend, iterations=1) #Erode edges near nodata trend_filt = filtlib.erode_edge(trend, iterations=1) print("Output pixel count: %s" % trend_filt.count()) #Rolling median filter (remove noise) - can use a slightly larger window here trend_filt = filtlib.rolling_fltr(trend_filt, size=size, circular=True, origmask=True) print("Output pixel count: %s" % trend_filt.count()) #Gaussian filter (smooth) #trend_filt = filtlib.gauss_fltr_astropy(trend_filt, size=size, origmask=True, fill_interior=True) trend_filt = filtlib.gauss_fltr_astropy(trend_filt, size=size) print("Output pixel count: %s" % trend_filt.count()) trend_fn=out_fn+'_trend_%spx_filt.tif' % size print("Writing out: %s" % trend_fn) iolib.writeGTiff(trend_filt*365.25, trend_fn, trend_ds) #Update intercept using new filtered slope values #Need to update for different periods? #dt_pivot = timelib.mean_date(datetime(2000,5,31), datetime(2009,5,31)) #dt_pivot = timelib.mean_date(datetime(2009,5,31), datetime(2018,5,31))