Example #1
def butter_low(dt_list, val, lowpass=1.0):
    """Apply a Butterworth low-pass filter to a 1D time series

    dt_list: sample datetimes (or float ordinal days)
    val: values to filter
    lowpass: cutoff period in days
    """
    import scipy.signal
    #val_mask = np.ma.getmaskarray(val)
    #Compute sample spacing in seconds (dt is 300 s, 5 min for this dataset)
    dt_diff = np.diff(dt_list)
    if isinstance(dt_diff[0], float):
        #Assume float inputs are ordinal days, convert to seconds
        dt_diff *= 86400.
    else:
        dt_diff = np.array([dt.total_seconds() for dt in dt_diff])
    dt = malib.fast_median(dt_diff)
    #f is 0.00333 Hz
    #288 samples/day
    fs = 1./dt
    nyq = fs/2.
    order = 3
    f_max = (1./(86400*lowpass)) / nyq
    b, a = scipy.signal.butter(order, f_max, btype='lowpass')
    #b, a = sp.signal.butter(order, (f_min, f_max), btype='bandstop')
    #Frequency response, retained for inspection (w, h are otherwise unused here)
    w, h = scipy.signal.freqz(b, a, worN=2000)
    # w_f = (nyq/np.pi)*w
    # w_f_days = 1/w_f/86400.
    #plt.plot(w_f_days, np.abs(h))

    val_f = scipy.signal.filtfilt(b, a, val)
    return val_f
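A minimal usage sketch for butter_low. The synthetic 5 min time series is hypothetical, and numpy (np) plus the malib helper module are assumed to be imported at module scope, as elsewhere in these snippets:

import datetime
import numpy as np

#Hypothetical 5 min sampling over 10 days (2880 samples)
t0 = datetime.datetime(2015, 1, 1)
dt_list = [t0 + datetime.timedelta(seconds=300*i) for i in range(2880)]
t_days = np.arange(2880) * 300. / 86400.
#Synthetic series: slow trend plus semidiurnal "tide" and noise
val = 0.1*t_days + 0.5*np.sin(2*np.pi*t_days/0.5) + 0.05*np.random.randn(2880)
#Retain variability with periods longer than ~1 day
val_f = butter_low(dt_list, val, lowpass=1.0)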
Example #2
def butter(dt_list, val, lowpass=1.0):
    """This is framework for a butterworth bandpass for 1D data
    
    Needs to be cleaned up and generalized
    """
    import scipy.signal
    import matplotlib.pyplot as plt
    #dt is 300 s, 5 min
    dt_diff = np.diff(dt_list)
    dt_diff = np.array([dt.total_seconds() for dt in dt_diff])
    dt = malib.fast_median(dt_diff)
    #f is 0.00333 Hz
    #288 samples/day
    fs = 1./dt
    nyq = fs/2.

    if False:
        #psd, f = psd(z_msl, fs) 
        sp_f, sp_psd = scipy.signal.periodogram(val, fs, detrend='linear')
        #sp_f, sp_psd = scipy.signal.welch(z_msl, fs, nperseg=2048)
        sp_f_days = 1./sp_f/86400.

        plt.figure()
        plt.plot(sp_f, sp_psd)
        plt.plot(sp_f_days, sp_psd)
        plt.semilogy(sp_f_days, sp_psd)
        plt.xlabel('Frequency')
        plt.ylabel('Power')

    print("Filtering tidal signal")
    #Define bandpass filter
    #f_min = dt/(86400*0.25)
    f_max = (1./(86400*0.1)) / nyq
    f_min = (1./(86400*1.8)) / nyq
    order = 6
    b, a = scipy.signal.butter(order, f_min, btype='highpass')
    #b, a = sp.signal.butter(order, (f_min, f_max), btype='bandpass')
    w, h = scipy.signal.freqz(b, a, worN=2000)
    w_f = (nyq/np.pi)*w
    w_f_days = 1/w_f/86400.
    #plt.figure()
    #plt.plot(w_f_days, np.abs(h))
    val_f_tide = scipy.signal.filtfilt(b, a, val)

    b, a = scipy.signal.butter(order, f_max, btype='lowpass')
    #b, a = sp.signal.butter(order, (f_min, f_max), btype='bandstop')
    w, h = scipy.signal.freqz(b, a, worN=2000)
    w_f = (nyq/np.pi)*w
    w_f_days = 1/w_f/86400.
    #plt.plot(w_f_days, np.abs(h))

    val_f_tide_denoise = scipy.signal.filtfilt(b, a, val_f_tide)
    #val_f_notide = sp.signal.filtfilt(b, a, val)
    val_f_notide = val - val_f_tide
    #Assumed return (the original snippet ended without returning the filtered series)
    return val_f_notide
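The chained high-pass/low-pass above isolates the tidal band (periods between ~0.1 and ~1.8 days). The commented-out lines hint at a single bandpass call instead; a sketch of that alternative, assuming nyq and val are defined as in butter() (note a single order-6 bandpass is similar to, not identical to, the two chained filters):

import scipy.signal

#Retain periods between 0.1 and 1.8 days in one call
f_min = (1./(86400*1.8)) / nyq  #lowest normalized frequency (longest period)
f_max = (1./(86400*0.1)) / nyq  #highest normalized frequency (shortest period)
b, a = scipy.signal.butter(6, (f_min, f_max), btype='bandpass')
val_f_tide = scipy.signal.filtfilt(b, a, val)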
Example #3
def compute_offset_nuth(dh, slope, aspect, min_count=100, remove_outliers=True, plot=True):
    """Compute horizontal offset between input rasters using Nuth and Kaab [2011] (nuth) method
    """
    import scipy.optimize as optimization

    if dh.count() < min_count:
        sys.exit("Not enough dh samples")
    if slope.count() < min_count:
        sys.exit("Not enough slope/aspect samples")

    #mean_dh = dh.mean()
    #mean_slope = slope.mean()
    #c_seed = (mean_dh/np.tan(np.deg2rad(mean_slope))) 
    med_dh = malib.fast_median(dh)
    med_slope = malib.fast_median(slope)
    c_seed = (med_dh/np.tan(np.deg2rad(med_slope))) 

    x0 = np.array([0.0, 0.0, c_seed])
  
    print("Computing common mask")
    common_mask = ~(malib.common_mask([dh, aspect, slope]))

    #Prepare x and y data
    xdata = aspect[common_mask].data
    ydata = (dh[common_mask]/np.tan(np.deg2rad(slope[common_mask]))).data

    print("Initial sample count:")
    print(ydata.size)

    if remove_outliers:
        print("Removing outliers")
        #print("Absolute dz filter: %0.2f" % max_dz)
        #diff = np.ma.masked_greater(diff, max_dz)
        #print(diff.count())

        #Outlier dz filter
        f = 3
        sigma, u = (ydata.std(), ydata.mean())
        #sigma, u = malib.mad(ydata, return_med=True)
        rmin = u - f*sigma
        rmax = u + f*sigma
        print("3-sigma filter: %0.2f - %0.2f" % (rmin, rmax))
        idx = (ydata >= rmin) & (ydata <= rmax)
        xdata = xdata[idx]
        ydata = ydata[idx]
        print(ydata.size)

    #Generate synthetic data to test curve_fit
    #xdata = np.arange(0,360,0.01)
    #ydata = f(xdata, 20.0, 130.0, -3.0) + 20*np.random.normal(size=len(xdata))
    
    #Limit sample size
    #n = 10000
    #idx = random.sample(range(xdata.size), n)
    #xdata = xdata[idx]
    #ydata = ydata[idx]

    #Compute robust statistics for 1-degree bins
    nbins = 360
    bin_range = (0., 360.)
    bin_width = 1.0
    bin_count, bin_edges, bin_centers = malib.bin_stats(xdata, ydata, stat='count', nbins=nbins, bin_range=bin_range)
    bin_med, bin_edges, bin_centers = malib.bin_stats(xdata, ydata, stat='median', nbins=nbins, bin_range=bin_range)
    #Needed to estimate sigma for weighted lsq
    #bin_mad, bin_edges, bin_centers = malib.bin_stats(xdata, ydata, stat=malib.mad, nbins=nbins, bin_range=bin_range)
    #Started implementing this for more generic binning, needs testing
    #bin_count, x_bin_edges, y_bin_edges = malib.get_2dhist(xdata, ydata, \
    #        xlim=bin_range, nbins=(nbins, nbins), stat='count')

    """
    #Mask bins in grid directions, can potentially contain biased stats
    #Especially true for SGM algorithm
    #badbins = [0, 90, 180, 270, 360]
    badbins = [0, 45, 90, 135, 180, 225, 270, 315, 360]
    bin_stat = np.ma.masked_where(np.around(bin_edges[:-1]) % 45 == 0, bin_stat)
    bin_edges = np.ma.masked_where(np.around(bin_edges[:-1]) % 45 == 0, bin_edges)
    """

    #Remove any bins with only a few points
    min_bin_sample_count = 9
    idx = (bin_count.filled(0) >= min_bin_sample_count) 
    bin_count = bin_count[idx].data
    bin_med = bin_med[idx].data
    #bin_mad = bin_mad[idx].data
    bin_centers = bin_centers[idx]

    fit = None
    fit_fig = None

    #Want a good distribution of bins, at least 1/4 to 1/2 of sinusoid, to ensure good fit
    #Need at least 3 valid bins to fit 3 parameters in nuth_func
    #min_bin_count = 3
    min_bin_count = 90 
    
    #Not going to help if we have a step function between two plateaus, but better than nothing
    #Calculate bin aspect spread
    bin_ptp = np.cos(np.radians(bin_centers)).ptp()
    min_bin_ptp = 1.0 

    #Should iterate here, if not enough bins, increase bin width
    if len(bin_med) >= min_bin_count and bin_ptp >= min_bin_ptp:

        print("Computing fit")
        #Unweighted fit
        fit = optimization.curve_fit(nuth_func, bin_centers, bin_med, x0)[0]

        #Weight by observed spread in each bin 
        #sigma = bin_mad
        #fit = optimization.curve_fit(nuth_func, bin_centers, bin_med, x0, sigma, absolute_sigma=True)[0]

        #Weight by bin count
        #sigma = bin_count.max()/bin_count
        #fit = optimization.curve_fit(nuth_func, bin_centers, bin_med, x0, sigma, absolute_sigma=False)[0]

        print(fit)

        if plot:
            print("Generating Nuth and Kaab plot")
            bin_idx = np.digitize(xdata, bin_edges)
            output = []
            for i in np.arange(1, len(bin_edges)):
                output.append(ydata[bin_idx==i])
            #flierprops={'marker':'.'}
            lw = 0.25
            whiskerprops={'linewidth':lw}
            capprops={'linewidth':lw}
            boxprops={'facecolor':'k', 'linewidth':0}
            medianprops={'marker':'o', 'ms':1, 'color':'r'}
            fit_fig, ax = plt.subplots(figsize=(6,6))
            #widths = (bin_width/2.0)
            widths = 2.5*(bin_count/bin_count.max())
            #widths = bin_count/np.percentile(bin_count, 50)
            #Stride
            s=3
            #This is inefficient, but we have a list of arrays with different lengths, need to filter
            #Redundant with earlier filter, should refactor
            #Note: dtype=object is needed for ragged arrays with numpy >= 1.24
            bp = ax.boxplot(np.array(output, dtype=object)[idx][::s], positions=bin_centers[::s], widths=widths[::s], showfliers=False, \
                    patch_artist=True, boxprops=boxprops, whiskerprops=whiskerprops, capprops=capprops, \
                    medianprops=medianprops)
            bin_ticks = [0, 45, 90, 135, 180, 225, 270, 315, 360]
            ax.set_xticks(bin_ticks)
            ax.set_xticklabels(bin_ticks)
            """
            #Can pull out medians from boxplot
            #We are computing multiple times, inefficient
            bp_bin_med = []
            for medline in bp['medians']:
                bp_bin_med.append(medline.get_ydata()[0])
            """

            #Plot the fit
            f_a = nuth_func(bin_centers, fit[0], fit[1], fit[2])
            nuth_func_str = r'$y=%0.2f*cos(%0.2f-x)+%0.2f$' % tuple(fit)
            ax.plot(bin_centers, f_a, 'b', label=nuth_func_str)

            ax.set_xlabel('Aspect (deg)')
            ax.set_ylabel('dh/tan(slope) (m)')
            ax.axhline(color='gray', linewidth=0.5)

            ax.set_xlim(*bin_range)
            ylim = ax.get_ylim()
            abs_ylim = np.max(np.abs(ylim))
            #abs_ylim = np.max(np.abs([ydata.min(), ydata.max()]))
            #pad = 0.2 * abs_ylim 
            pad = 0
            ylim = (-abs_ylim - pad, abs_ylim + pad)
            minylim = (-10,10)
            if ylim[0] > minylim[0]:
                ylim = minylim
            ax.set_ylim(*ylim)
            ax.legend(prop={'size':8})

    return fit, fit_fig
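nuth_func is referenced but not included in these snippets. From the plot label y=a*cos(b-x)+c and the decomposition of fit_param into dx/dy/dz in Example #4, a plausible sketch (an assumption, not the verbatim source) is:

import numpy as np

def nuth_func(x, a, b, c):
    """Sinusoid relating aspect to dh/tan(slope), after Nuth and Kaab [2011]

    x: terrain aspect (degrees)
    a: magnitude of the horizontal shift vector
    b: direction of the shift vector (degrees)
    c: mean elevation bias divided by tan(mean slope)
    """
    return a * np.cos(np.deg2rad(b - x)) + c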
Example #4
def compute_offset(ref_dem_ds, src_dem_ds, src_dem_fn, mode='nuth', remove_outliers=True, max_offset=100, \
        max_dz=100, slope_lim=(0.1, 40), mask_list=['glaciers',], plot=True):
    #Make sure the input datasets have the same resolution/extent
    #Use projection of source DEM
    ref_dem_clip_ds, src_dem_clip_ds = warplib.memwarp_multi([ref_dem_ds, src_dem_ds], \
            res='max', extent='intersection', t_srs=src_dem_ds, r='cubic')

    #Compute size of NCC and SAD search window in pixels
    res = float(geolib.get_res(ref_dem_clip_ds, square=True)[0])
    max_offset_px = (max_offset/res) + 1
    #print(max_offset_px)
    pad = (int(max_offset_px), int(max_offset_px))

    #This will be updated geotransform for src_dem
    src_dem_gt = np.array(src_dem_clip_ds.GetGeoTransform())

    #Load the arrays
    ref_dem = iolib.ds_getma(ref_dem_clip_ds, 1)
    src_dem = iolib.ds_getma(src_dem_clip_ds, 1)

    print("Elevation difference stats for uncorrected input DEMs (src - ref)")
    diff = src_dem - ref_dem

    static_mask = get_mask(src_dem_clip_ds, mask_list, src_dem_fn)
    diff = np.ma.array(diff, mask=static_mask)

    if diff.count() == 0:
        sys.exit("No overlapping, unmasked pixels shared between input DEMs")

    if remove_outliers:
        diff = outlier_filter(diff, f=3, max_dz=max_dz)

    #Want to use higher quality DEM, should determine automatically from original res/count
    #slope = get_filtered_slope(ref_dem_clip_ds, slope_lim=slope_lim)
    slope = get_filtered_slope(src_dem_clip_ds, slope_lim=slope_lim)

    print("Computing aspect")
    #aspect = geolib.gdaldem_mem_ds(ref_dem_clip_ds, processing='aspect', returnma=True, computeEdges=False)
    aspect = geolib.gdaldem_mem_ds(src_dem_clip_ds, processing='aspect', returnma=True, computeEdges=False)

    ref_dem_clip_ds = None
    src_dem_clip_ds = None

    #Apply slope filter to diff
    #Note that we combine masks from diff and slope in coreglib
    diff = np.ma.array(diff, mask=np.ma.getmaskarray(slope))

    #Get final mask after filtering
    static_mask = np.ma.getmaskarray(diff)

    #Compute stats for new masked difference map
    print("Filtered difference map")
    diff_stats = malib.print_stats(diff)
    dz = diff_stats[5]

    print("Computing sub-pixel offset between DEMs using mode: %s" % mode)

    #By default, don't create output figure
    fig = None

    #Default horizontal shift is (0,0)
    dx = 0
    dy = 0

    #Sum of absolute differences
    if mode == "sad":
        ref_dem = np.ma.array(ref_dem, mask=static_mask)
        src_dem = np.ma.array(src_dem, mask=static_mask)
        m, int_offset, sp_offset = coreglib.compute_offset_sad(ref_dem, src_dem, pad=pad)
        #Geotransform has negative y resolution, so don't need negative sign
        #np array is positive down
        #GDAL coordinates are positive up
        dx = sp_offset[1]*src_dem_gt[1]
        dy = sp_offset[0]*src_dem_gt[5]
    #Normalized cross-correlation of clipped, overlapping areas
    elif mode == "ncc":
        ref_dem = np.ma.array(ref_dem, mask=static_mask)
        src_dem = np.ma.array(src_dem, mask=static_mask)
        m, int_offset, sp_offset, fig = coreglib.compute_offset_ncc(ref_dem, src_dem, \
                pad=pad, prefilter=False, plot=plot)
        dx = sp_offset[1]*src_dem_gt[1]
        dy = sp_offset[0]*src_dem_gt[5]
    #Nuth and Kaab (2011)
    elif mode == "nuth":
        #Compute relationship between elevation difference, slope and aspect
        fit_param, fig = coreglib.compute_offset_nuth(diff, slope, aspect, plot=plot)
        if fit_param is None:
            print("Failed to calculate horizontal shift")
        else:
            #fit_param[0] is magnitude of shift vector
            #fit_param[1] is direction of shift vector
            #fit_param[2] is mean bias divided by tangent of mean slope
            #print(fit_param)
            dx = fit_param[0]*np.sin(np.deg2rad(fit_param[1]))
            dy = fit_param[0]*np.cos(np.deg2rad(fit_param[1]))
            med_slope = malib.fast_median(slope)
            nuth_dz = fit_param[2]*np.tan(np.deg2rad(med_slope))
            print('Median dz: %0.2f\nNuth dz: %0.2f' % (dz, nuth_dz))
            #dz = nuth_dz
    elif mode == "all":
        print("Not yet implemented")
        #Want to compare all methods, average offsets
        #m, int_offset, sp_offset = coreglib.compute_offset_sad(ref_dem, src_dem)
        #m, int_offset, sp_offset = coreglib.compute_offset_ncc(ref_dem, src_dem)
    elif mode == "none":
        print("Skipping alignment, writing out DEM with median bias over static surfaces removed")
        dst_fn = outprefix+'_med%0.1f.tif' % dz
        iolib.writeGTiff(src_dem_orig + dz, dst_fn, src_dem_ds)
        sys.exit()
    #Note: minus signs here since we are computing dz=(src-ref), but adjusting src
    return -dx, -dy, -dz, static_mask, fig
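A hypothetical caller sketch showing how the returned corrections might be applied to the source DEM. The convention of adding dx/dy to the geotransform origin and dz to the elevations is an assumption based on the sign comment above, and iolib is assumed as in the snippet:

dx, dy, dz, static_mask, fig = compute_offset(ref_dem_ds, src_dem_ds, src_dem_fn, mode='nuth')
#Shift the geotransform origin by the horizontal corrections (map units)
src_dem_gt = list(src_dem_ds.GetGeoTransform())
src_dem_gt[0] += dx
src_dem_gt[3] += dy
#Remove the remaining vertical offset from the elevation values
src_dem_shifted = iolib.ds_getma(src_dem_ds, 1) + dz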
Example #5
        z1_bin_counts, z1_bin_edges = np.histogram(z1, bins=z_bin_edges)
        z1_bin_areas = z1_bin_counts * ds_res[0] * ds_res[1] / 1E6
        z2_bin_counts, z2_bin_edges = np.histogram(z2, bins=z_bin_edges)
        z2_bin_areas = z2_bin_counts * ds_res[0] * ds_res[1] / 1E6

        #dz_bin_edges, dz_bin_centers = get_bins(dz, 1.)
        #dz_bin_counts, dz_bin_edges = np.histogram(dz, bins=dz_bin_edges)
        #dz_bin_areas = dz_bin_counts * ds_res * ds_res / 1E6
        dz_bin_med = np.ma.masked_all_like(z1_bin_areas)
        dz_bin_mad = np.ma.masked_all_like(z1_bin_areas)
        idx = np.digitize(z1, z_bin_edges)
        for bin_n in range(z_bin_centers.size):
            dz_bin_samp = mb[(idx == bin_n + 1)]
            #dz_bin_samp = dhdt[(idx == n+1)]
            if dz_bin_samp.count() > 0:
                #Note: the original assigned median/MAD and then immediately
                #overwrote them with mean/std; keep the robust stats the names imply
                dz_bin_med[bin_n] = malib.fast_median(dz_bin_samp)
                dz_bin_mad[bin_n] = malib.mad(dz_bin_samp)

        print("Generating map plot")
        f, axa = plt.subplots(1, 3, figsize=(10, 7.5))
        f.suptitle(feat_fn)
        alpha = 1.0
        hs = True
        if hs:
            z1_hs = geolib.gdaldem_wrapper(out_z1_fn,
                                           product='hs',
                                           returnma=True)
            z2_hs = geolib.gdaldem_wrapper(out_z2_fn,
                                           product='hs',
                                           returnma=True)
Example #6
def hist_plot(gf, outdir, bin_width=10.0):
    #print("Generating histograms")
    #Create bins for full range of input data and specified bin width

    #NOTE: these counts/areas are for valid pixels only
    #Not necessarily a true representation of actual glacier hypsometry
    #Need a void-filled DEM for this

    z_bin_edges, z_bin_centers = malib.get_bins(gf.z1, bin_width)
    z1_bin_counts, z1_bin_edges = np.histogram(gf.z1, bins=z_bin_edges)
    z1_bin_areas = z1_bin_counts * gf.res[0] * gf.res[1] / 1E6
    #RGI standard is integer thousandths of the glacier's total area
    #Should check to make sure sum of bin areas equals total area
    z1_bin_areas_perc = 100. * z1_bin_areas / np.sum(z1_bin_areas)

    z2_bin_counts, z2_bin_edges = np.histogram(gf.z2, bins=z_bin_edges)
    z2_bin_areas = z2_bin_counts * gf.res[0] * gf.res[1] / 1E6
    z2_bin_areas_perc = 100. * z2_bin_areas / np.sum(z2_bin_areas)

    #Create arrays to store output
    mb_bin_med = np.ma.masked_all_like(z1_bin_areas)
    mb_bin_mad = np.ma.masked_all_like(z1_bin_areas)
    mb_bin_mean = np.ma.masked_all_like(z1_bin_areas)
    mb_bin_std = np.ma.masked_all_like(z1_bin_areas)
    dz_bin_med = np.ma.masked_all_like(z1_bin_areas)
    dz_bin_mad = np.ma.masked_all_like(z1_bin_areas)
    dz_bin_mean = np.ma.masked_all_like(z1_bin_areas)
    dz_bin_std = np.ma.masked_all_like(z1_bin_areas)
    if gf.debris_class is not None:
        perc_clean = np.ma.masked_all_like(z1_bin_areas)
        perc_debris = np.ma.masked_all_like(z1_bin_areas)
        perc_pond = np.ma.masked_all_like(z1_bin_areas)
        debris_thick_med = np.ma.masked_all_like(z1_bin_areas)
        debris_thick_mad = np.ma.masked_all_like(z1_bin_areas)

    #Loop through each bin and extract stats
    idx = np.digitize(gf.z1, z_bin_edges)
    for bin_n in range(z_bin_centers.size):
        mb_bin_samp = gf.mb[(idx == bin_n + 1)]
        if mb_bin_samp.count() > 0:
            mb_bin_med[bin_n] = malib.fast_median(mb_bin_samp)
            mb_bin_mad[bin_n] = malib.mad(mb_bin_samp)
            mb_bin_mean[bin_n] = mb_bin_samp.mean()
            mb_bin_std[bin_n] = mb_bin_samp.std()
        dz_bin_samp = gf.dhdt[(idx == bin_n + 1)]
        if dz_bin_samp.count() > 0:
            dz_bin_med[bin_n] = malib.fast_median(dz_bin_samp)
            dz_bin_mad[bin_n] = malib.mad(dz_bin_samp)
            dz_bin_mean[bin_n] = dz_bin_samp.mean()
            dz_bin_std[bin_n] = dz_bin_samp.std()
        if gf.debris_class is not None:
            debris_class_bin_samp = gf.debris_class[(idx == bin_n + 1)]
            if debris_class_bin_samp.count() > 0:
                perc_clean[bin_n] = 100. * (
                    debris_class_bin_samp
                    == 1).sum() / debris_class_bin_samp.count()
                perc_debris[bin_n] = 100. * (
                    debris_class_bin_samp
                    == 2).sum() / debris_class_bin_samp.count()
                perc_pond[bin_n] = 100. * (
                    debris_class_bin_samp
                    == 3).sum() / debris_class_bin_samp.count()

            debris_thick_bin_samp = gf.debris_thick[(idx == bin_n + 1)]
            #Guard against empty bins, consistent with the count() checks above
            if debris_thick_bin_samp.count() > 0:
                debris_thick_med[bin_n] = malib.fast_median(debris_thick_bin_samp)
                debris_thick_mad[bin_n] = malib.mad(debris_thick_bin_samp)

    outbins_header = 'bin_center_elev_m, z1_bin_count_valid, z1_bin_area_valid_km2, z1_bin_area_perc, z2_bin_count_valid, z2_bin_area_valid_km2, z2_bin_area_perc, dhdt_bin_med_ma, dhdt_bin_mad_ma, dhdt_bin_mean_ma, dhdt_bin_std_ma, mb_bin_med_mwea, mb_bin_mad_mwea, mb_bin_mean_mwea, mb_bin_std_mwea'
    fmt = '%0.1f, %i, %0.3f, %0.2f, %i, %0.3f, %0.2f, %0.2f, %0.2f, %0.2f, %0.2f, %0.2f, %0.2f, %0.2f, %0.2f'
    outbins = [
        z_bin_centers, z1_bin_counts, z1_bin_areas, z1_bin_areas_perc,
        z2_bin_counts, z2_bin_areas, z2_bin_areas_perc, dz_bin_med, dz_bin_mad,
        dz_bin_mean, dz_bin_std, mb_bin_med, mb_bin_mad, mb_bin_mean,
        mb_bin_std
    ]

    if gf.debris_class is not None:
        outbins_header += ', debris_thick_med_m, debris_thick_mad_m, perc_debris, perc_pond, perc_clean'
        fmt += ', %0.2f, %0.2f, %0.2f, %0.2f, %0.2f'
        debris_thick_med[debris_thick_med == -(np.inf)] = 0.00
        debris_thick_mad[debris_thick_mad == -(np.inf)] = 0.00
        outbins.extend([
            debris_thick_med, debris_thick_mad, perc_debris, perc_pond,
            perc_clean
        ])

    #print(len(outbins), len(fmt.split(',')), len(outbins_header.split(',')))
    outbins = np.ma.array(outbins).T.astype('float32')
    np.ma.set_fill_value(outbins, -9999.0)
    outbins_fn = os.path.join(outdir, gf.feat_fn + '_mb_bins.csv')
    #print(outbins.shape)
    np.savetxt(outbins_fn,
               outbins,
               fmt=fmt,
               delimiter=',',
               header=outbins_header)

    #print("Generating aed plot")
    #f,axa = plt.subplots(1,2, figsize=(6, 6))
    f, axa = plt.subplots(1, 3, figsize=(10, 7.5))
    f.suptitle(gf.feat_fn)
    axa[0].plot(z1_bin_areas, z_bin_centers, label='%0.2f' % gf.t1)
    axa[0].plot(z2_bin_areas, z_bin_centers, label='%0.2f' % gf.t2)
    axa[0].axhline(gf.z1_ela, ls=':', c='C0')
    axa[0].axhline(gf.z2_ela, ls=':', c='C1')
    axa[0].legend(prop={'size': 8}, loc='upper right')
    axa[0].set_ylabel('Elevation (m WGS84)')
    axa[0].set_xlabel(r'Area $\mathregular{km^2}$')
    pltlib.minorticks_on(axa[0])
    axa[1].axvline(0, lw=1.0, c='k')
    axa[1].axvline(gf.mb_mean,
                   lw=0.5,
                   ls=':',
                   c='k',
                   label='%0.2f m w.e./yr' % gf.mb_mean)
    axa[1].legend(prop={'size': 8}, loc='upper right')
    axa[1].plot(mb_bin_med, z_bin_centers, color='k')
    axa[1].fill_betweenx(z_bin_centers,
                         mb_bin_med - mb_bin_mad,
                         mb_bin_med + mb_bin_mad,
                         color='k',
                         alpha=0.1)
    axa[1].fill_betweenx(z_bin_centers,
                         0,
                         mb_bin_med,
                         where=(mb_bin_med < 0),
                         color='r',
                         alpha=0.2)
    axa[1].fill_betweenx(z_bin_centers,
                         0,
                         mb_bin_med,
                         where=(mb_bin_med > 0),
                         color='b',
                         alpha=0.2)
    #axa[1].set_ylabel('Elevation (m WGS84)')
    #axa[1].set_xlabel('dh/dt (m/yr)')
    axa[1].set_xlabel('mb (m w.e./yr)')
    pltlib.minorticks_on(axa[1])
    #Hide y-axis labels
    axa[1].axes.yaxis.set_ticklabels([])
    #axa[1].set_xlim(-2.0, 2.0)
    #axa[1].set_xlim(-8.0, 8.0)
    axa[1].set_xlim(-3.0, 3.0)
    if gf.debris_class is not None:
        axa[2].errorbar(debris_thick_med * 100.,
                        z_bin_centers,
                        xerr=debris_thick_mad * 100,
                        color='k',
                        fmt='o',
                        ms=3,
                        label='Thickness',
                        alpha=0.6)
        axa[2].plot(perc_debris,
                    z_bin_centers,
                    color='sienna',
                    label='Debris Coverage')
        axa[2].plot(perc_pond,
                    z_bin_centers,
                    color='turquoise',
                    label='Pond Coverage')
        axa[2].set_xlim(0, 100)
        pltlib.minorticks_on(axa[2])
        axa[2].legend(prop={'size': 8}, loc='upper right')
        axa[2].set_xlabel('Debris thickness (cm), coverage (%)')
        axa[2].yaxis.tick_right()
        axa[2].yaxis.set_label_position("right")
    plt.tight_layout()
    #Make room for suptitle
    plt.subplots_adjust(top=0.95)
    #print("Saving aed plot")
    fig_fn = os.path.join(outdir, gf.feat_fn + '_mb_aed.png')
    plt.savefig(fig_fn, bbox_inches='tight', dpi=300)
    plt.close(f)
    return z_bin_edges
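malib.get_bins is not shown in these snippets. A minimal sketch consistent with how it is used here (full-range edges at the given width, plus matching centers); this is an assumption, not the library implementation:

import numpy as np

def get_bins(z, bin_width=10.0):
    """Bin edges spanning the data range at bin_width, with bin centers"""
    zmin = np.floor(z.min() / bin_width) * bin_width
    zmax = np.ceil(z.max() / bin_width) * bin_width
    bin_edges = np.arange(zmin, zmax + bin_width, bin_width)
    bin_centers = bin_edges[:-1] + bin_width / 2.
    return bin_edges, bin_centers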
Example #7
def sample_stack(ex, ey, geoid_offset=False, pad=3):
    if ex > m.shape[2]-1 or ey > m.shape[1]-1:
        print "Input coordinates are outside stack extent:"
        print ex, ey
        print m.shape
        v = None
    else:
        print "Sampling with pad: %i" % pad
        if pad == 0:
            v = m[:,ey,ex]
        else:
            window_x = np.around(np.clip([ex-pad, ex+pad+1], 0, m.shape[2]-1)).astype(int)
            window_y = np.around(np.clip([ey-pad, ey+pad+1], 0, m.shape[1]-1)).astype(int)
            print(window_x)
            print(window_y)
            v = m[:,window_y[0]:window_y[1],window_x[0]:window_x[1]].reshape(m.shape[0], np.ptp(window_x)*np.ptp(window_y))
            #v = v.mean(axis=1)
            v = np.ma.median(v, axis=1)
        if v.count() == 0:
            print "No valid values"
        else:
            mx, my = geolib.pixelToMap(ex, ey, gt)
            print(ex, ey, mx, my)
            print("Count: %i" % v.count())
            #Hack to get elevations relative to geoid
            #Note: this can be added multiple times if clicked quickly
            if geoid_offset:
                #geoid_offset = geolib.sps2geoid(mx, my, 0.0)[2]
                geoid_offset = geolib.nps2geoid(mx, my, 0.0)[2]
                print "Removing geoid offset: %0.1f" % geoid_offset
                v += geoid_offset
        #Should filter here
        #RS1 has some values that are many 1000s of m/yr below neighbors
        #Note: filter_outliers, m, gt and d are assumed to be defined at module scope
        if filter_outliers:
            if True:
                med = malib.fast_median(v)
                mad = malib.mad(v)
                min_v = med - mad*4
                f_idx = (v < min_v).filled(False)
                if np.any(f_idx):
                    print(med, mad)
                    print("Outliers removed by absolute filter: (val < %0.1f)" % min_v)
                    print(timelib.o2dt(d[f_idx]))
                    print(v[f_idx])
                    v[f_idx] = np.ma.masked
            if True:
                v_idx = (~np.ma.getmaskarray(v)).nonzero()[0]
                #This tries to maintain a fixed window in time
                #f = filtlib.rolling_fltr(v, size=7)
                #This uses a fixed number of neighbors
                f = filtlib.rolling_fltr(v[v_idx], size=7)
                #f_diff = np.abs(f - v)
                #Note: the issue is usually that the velocity values are too low
                #f_diff = f - v
                f_diff = f - v[v_idx]
                diff_thresh = 2000
                #f_idx = (f_diff > diff_thresh).filled(False)
                #f_idx = (f_diff < diff_thresh).filled(False)
                f_idx = np.zeros_like(v.data).astype(bool)
                f_idx[v_idx] = (f_diff > diff_thresh)
                if np.any(f_idx):
                    print "Outliers removed by rolling median filter: (val < %0.1f)" % diff_thresh
                    print timelib.o2dt(d[f_idx])
                    print v[f_idx]
                    v[f_idx] = np.ma.masked
    return v
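sample_stack depends on several module-level globals. A hedged setup sketch with hypothetical values (the names m, gt, d and filter_outliers are inferred from the usage above):

import numpy as np

m = np.ma.masked_invalid(np.random.randn(10, 512, 512))  #stack: (time, y, x)
gt = [0.0, 30.0, 0.0, 0.0, 0.0, -30.0]                   #GDAL geotransform
d = np.arange(735000., 735010.)                          #ordinal dates for each slice
filter_outliers = True

v = sample_stack(100, 200, geoid_offset=False, pad=3)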
Example #8
File: disp2v.py Project: whigg/vmap
def main():
    parser = getparser()
    args = parser.parse_args()

    t_unit = args.dt
    plot = args.plot
    remove_offsets = args.remove_offsets
    mask_fn = args.mask_fn
    if mask_fn is not None:
        remove_offsets = True

    #Input is 3-band disparity map, extract bands directly
    src_fn = args.disp_fn
    if not iolib.fn_check(src_fn):
        sys.exit("Unable to locate input file: %s" % src_fn)

    src_ds = iolib.fn_getds(src_fn)
    if src_ds.RasterCount != 3:
        sys.exit("Input file must be ASP disparity map (3 bands: x, y, mask)")
    #Extract pixel resolution
    h_res, v_res = geolib.get_res(src_ds)

    #Horizontal scale factor
    #If running on disparity_view output (gdal_translate -outsize 5% 5% F.tif F_5.tif)
    #h_res /= 20
    #v_res /= 20

    #Load horizontal and vertical disparities
    h = iolib.ds_getma(src_ds, bnum=1)
    v = iolib.ds_getma(src_ds, bnum=2)

    #ASP output has northward motion as negative values in band 2
    v *= -1

    t1, t2 = timelib.fn_getdatetime_list(src_fn)
    dt = t2 - t1
    #Default t_factor is in 1/years
    t_factor = timelib.get_t_factor(t1, t2)

    #Input timestamp arrays if inputs are mosaics
    if False:
        t1_fn = ''
        t2_fn = ''
        if os.path.exists(t1_fn) and os.path.exists(t2_fn):
            t_factor = timelib.get_t_factor_fn(t1_fn, t2_fn)
        if t_factor is None:
            sys.exit("Unable to determine input timestamps")

    if t_unit == 'day':
        t_factor *= 365.25

    print("Input dates:")
    print(t1)
    print(t2)
    print(dt)
    print(t_factor, t_unit)

    #Scale values for polar stereographic distortion
    srs = geolib.get_ds_srs(src_ds)
    proj_scale_factor = 1.0
    #Want to scale to get correct distances for polar stereographic
    if srs.IsSame(geolib.nps_srs) or srs.IsSame(geolib.sps_srs):
        proj_scale_factor = geolib.scale_ps_ds(src_ds)

    #Convert disparity values in pixels to m/t_unit
    h_myr = h * h_res * proj_scale_factor / t_factor
    h = None
    v_myr = v * v_res * proj_scale_factor / t_factor
    v = None

    #Velocity Magnitude
    m = np.ma.sqrt(h_myr**2 + v_myr**2)
    print("Velocity Magnitude stats")
    malib.print_stats(m)

    #Remove x and y offsets over control surfaces
    offset_str = ''
    if remove_offsets:
        if mask_fn is None:
            from demcoreg.dem_mask import get_lulc_mask
            print(
                "\nUsing demcoreg to prepare mask of stable control surfaces\n"
            )
            mask = get_lulc_mask(src_ds,
                                 mask_glaciers=True,
                                 filter='rock+ice+water')
        else:
            print("\nWarping input raster mask")
            #This can be from previous dem_mask.py run (e.g. *rockmask.tif)
            mask_ds = warplib.memwarp_multi_fn([
                mask_fn,
            ],
                                               res=src_ds,
                                               extent=src_ds,
                                               t_srs=src_ds)[0]
            mask = iolib.ds_getma(mask_ds)
            #The default from ds_getma is a masked array, so need to isolate boolean mask
            #Assume input is 0 for masked, 1 for unmasked (valid control surface)
            mask = mask.filled().astype('bool')
            #This should work, as the *rockmask.py is 1 for unmasked, 0 for masked, with ndv=0
            #mask = np.ma.getmaskarray(mask)
            #Vector mask - untested
            #Note: os.path.splitext retains the leading dot in the extension
            if os.path.splitext(mask_fn)[1] == '.shp':
                mask = geolib.shp2array(mask_fn, src_ds)

        print("\nRemoving median x and y offset over static control surfaces")
        h_myr_count = h_myr.count()
        h_myr_static_count = h_myr[mask].count()
        h_myr_med = malib.fast_median(h_myr[mask])
        v_myr_med = malib.fast_median(v_myr[mask])
        h_myr_mad = malib.mad(h_myr[mask])
        v_myr_mad = malib.mad(v_myr[mask])
        print("Static pixel count: %i (%0.1f%%)" %
              (h_myr_static_count,
               100 * float(h_myr_static_count) / h_myr_count))
        print("median (+/-NMAD)")
        print("x velocity offset: %0.2f (+/-%0.2f) m/%s" %
              (h_myr_med, h_myr_mad, t_unit))
        print("y velocity offset: %0.2f (+/-%0.2f) m/%s" %
              (v_myr_med, v_myr_mad, t_unit))
        h_myr -= h_myr_med
        v_myr -= v_myr_med
        offset_str = '_offsetcorr_h%0.2f_v%0.2f' % (h_myr_med, v_myr_med)
        #Velocity Magnitude
        m = np.ma.sqrt(h_myr**2 + v_myr**2)
        print("Velocity Magnitude stats after correction")
        malib.print_stats(m)

    if plot:
        fig_fn = os.path.splitext(src_fn)[0] + '.png'
        label = 'Velocity (m/%s)' % t_unit
        f, ax = make_plot(m, fig_fn, label)
        plotvec(h_myr, v_myr)
        plt.tight_layout()
        plt.savefig(fig_fn,
                    dpi=300,
                    bbox_inches='tight',
                    pad_inches=0,
                    edgecolor='none')

    print("Writing out files")
    gt = src_ds.GetGeoTransform()
    proj = src_ds.GetProjection()
    dst_fn = os.path.splitext(src_fn)[0] + '_vm%s.tif' % offset_str
    iolib.writeGTiff(m, dst_fn, create=True, gt=gt, proj=proj)
    dst_fn = os.path.splitext(src_fn)[0] + '_vx%s.tif' % offset_str
    iolib.writeGTiff(h_myr, dst_fn, create=True, gt=gt, proj=proj)
    dst_fn = os.path.splitext(src_fn)[0] + '_vy%s.tif' % offset_str
    iolib.writeGTiff(v_myr, dst_fn, create=True, gt=gt, proj=proj)
    src_ds = None
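A worked example of the disparity-to-velocity conversion above, with hypothetical numbers:

disp_px = 12.5           #horizontal disparity (pixels)
h_res = 15.0             #pixel resolution (m)
proj_scale_factor = 1.0  #no polar stereographic distortion correction
t_factor = 0.96          #years between acquisitions
h_myr = disp_px * h_res * proj_scale_factor / t_factor
print("%0.1f m/yr" % h_myr)  #195.3 m/yr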
Example #9
def hist_plot(gf, outdir, bin_width=10.0):
    #print("Generating histograms")
    z_bin_edges, z_bin_centers = get_bins(gf.z1, bin_width)
    z1_bin_counts, z1_bin_edges = np.histogram(gf.z1, bins=z_bin_edges)
    z1_bin_areas = z1_bin_counts * gf.res[0] * gf.res[1] / 1E6
    #RGI standard is integer thousandths of the glacier's total area
    #Should check to make sure sum of bin areas equals total area
    z1_bin_areas_perc = 100. * z1_bin_areas / np.sum(z1_bin_areas)
    z2_bin_counts, z2_bin_edges = np.histogram(gf.z2, bins=z_bin_edges)
    z2_bin_areas = z2_bin_counts * gf.res[0] * gf.res[1] / 1E6
    z2_bin_areas_perc = 100. * z2_bin_areas / np.sum(z2_bin_areas)

    #dz_bin_edges, dz_bin_centers = get_bins(dz, 1.)
    #dz_bin_counts, dz_bin_edges = np.histogram(dz, bins=dz_bin_edges)
    #dz_bin_areas = dz_bin_counts * res * res / 1E6
    mb_bin_med = np.ma.masked_all_like(z1_bin_areas)
    mb_bin_mad = np.ma.masked_all_like(z1_bin_areas)
    dz_bin_med = np.ma.masked_all_like(z1_bin_areas)
    dz_bin_mad = np.ma.masked_all_like(z1_bin_areas)
    idx = np.digitize(gf.z1, z_bin_edges)
    for bin_n in range(z_bin_centers.size):
        mb_bin_samp = gf.mb[(idx == bin_n + 1)]
        if mb_bin_samp.count() > 0:
            #Note: the original assigned median/MAD and then immediately
            #overwrote them with mean/std; keep the robust stats the names imply
            mb_bin_med[bin_n] = malib.fast_median(mb_bin_samp)
            mb_bin_mad[bin_n] = malib.mad(mb_bin_samp)
        dz_bin_samp = gf.dhdt[(idx == bin_n + 1)]
        if dz_bin_samp.count() > 0:
            dz_bin_med[bin_n] = malib.fast_median(dz_bin_samp)
            dz_bin_mad[bin_n] = malib.mad(dz_bin_samp)

    #Should also export original dh/dt numbers, not mb
    #outbins_header = 'bin_center_elev, bin_count, dhdt_bin_med, dhdt_bin_mad, mb_bin_med, mb_bin_mad'
    #outbins = np.ma.dstack([z_bin_centers, z1_bin_counts, dz_bin_med, dz_bin_mad, mb_bin_med, mb_bin_mad]).astype('float32')[0]
    #fmt='%0.2f'
    outbins_header = 'bin_center_elev_m, z1_bin_count_valid, z1_bin_area_valid_km2, z1_bin_area_perc, z2_bin_count_valid, z2_bin_area_valid_km2, z2_bin_area_perc, dhdt_bin_med_ma, dhdt_bin_mad_ma, mb_bin_med_mwe, mb_bin_mad_mwe'
    fmt = '%0.1f, %i, %0.3f, %0.2f, %i, %0.3f, %0.2f, %0.2f, %0.2f, %0.2f, %0.2f'
    outbins = np.ma.dstack([
        z_bin_centers, z1_bin_counts, z1_bin_areas, z1_bin_areas_perc,
        z2_bin_counts, z2_bin_areas, z2_bin_areas_perc, dz_bin_med, dz_bin_mad,
        mb_bin_med, mb_bin_mad
    ]).astype('float32')[0]
    np.ma.set_fill_value(outbins, -9999.0)
    outbins_fn = os.path.join(outdir, gf.feat_fn + '_mb_bins.csv')
    np.savetxt(outbins_fn,
               outbins,
               fmt=fmt,
               delimiter=',',
               header=outbins_header)

    #print("Generating aed plot")
    f, axa = plt.subplots(1, 2, figsize=(6, 6))
    f.suptitle(gf.feat_fn)
    axa[0].plot(z1_bin_areas, z_bin_centers, label='%0.2f' % gf.t1)
    axa[0].plot(z2_bin_areas, z_bin_centers, label='%0.2f' % gf.t2)
    axa[0].axhline(gf.z1_ela, ls=':', c='C0')
    axa[0].axhline(gf.z2_ela, ls=':', c='C1')
    axa[0].legend(prop={'size': 8}, loc='upper right')
    axa[0].set_ylabel('Elevation (m WGS84)')
    axa[0].set_xlabel(r'Area $\mathregular{km^2}$')
    axa[0].minorticks_on()
    axa[1].yaxis.tick_right()
    axa[1].yaxis.set_label_position("right")
    axa[1].axvline(0, lw=1.0, c='k')
    axa[1].axvline(gf.mb_mean,
                   lw=0.5,
                   ls=':',
                   c='k',
                   label='%0.2f m w.e./yr' % gf.mb_mean)
    axa[1].legend(prop={'size': 8}, loc='upper right')
    axa[1].plot(mb_bin_med, z_bin_centers, color='k')
    axa[1].fill_betweenx(z_bin_centers,
                         0,
                         mb_bin_med,
                         where=(mb_bin_med < 0),
                         color='r',
                         alpha=0.2)
    axa[1].fill_betweenx(z_bin_centers,
                         0,
                         mb_bin_med,
                         where=(mb_bin_med > 0),
                         color='b',
                         alpha=0.2)
    #axa[1].set_ylabel('Elevation (m WGS84)')
    #axa[1].set_xlabel('dh/dt (m/yr)')
    axa[1].set_xlabel('mb (m w.e./yr)')
    axa[1].minorticks_on()
    axa[1].set_xlim(-2.0, 2.0)
    #axa[1].set_xlim(-8.0, 8.0)
    plt.tight_layout()
    #Make room for suptitle
    plt.subplots_adjust(top=0.95)
    #print("Saving aed plot")
    fig_fn = os.path.join(outdir, gf.feat_fn + '_mb_aed.png')
    plt.savefig(fig_fn, bbox_inches='tight', dpi=300)
    return z_bin_edges
Example #10
def compute_offset_nuth(dh, slope, aspect):
    """Compute horizontal offset between input rasters using Nuth and Kaab [2011] (nuth) method
    """
    import scipy.optimize as optimization

    #mean_dh = dh.mean()
    #mean_slope = slope.mean()
    #c_seed = (mean_dh/np.tan(np.deg2rad(mean_slope))) 
    
    med_dh = malib.fast_median(dh)
    med_slope = malib.fast_median(slope)
    c_seed = (med_dh/np.tan(np.deg2rad(med_slope))) 

    x0 = np.array([0.0, 0.0, c_seed])
  
    print("Computing common mask")
    common_mask = ~(malib.common_mask([dh, aspect, slope]))

    xdata = aspect[common_mask]
    ydata = dh[common_mask]/np.tan(np.deg2rad(slope[common_mask]))

    #Generate synthetic data to test curve_fit
    #xdata = np.arange(0,360,0.01)
    #ydata = f(xdata, 20.0, 130.0, -3.0) + 20*np.random.normal(size=len(xdata))
    
    #Limit sample size
    #n = 10000
    #idx = random.sample(range(xdata.size), n)
    #xdata = xdata[idx]
    #ydata = ydata[idx]

    """
    #Fit to original, unfiltered data
    fit = optimization.curve_fit(nuth_func, xdata, ydata, x0)[0]
    print(fit) 
    genplot(xdata, ydata, fit) 
    """

    """
    #Filter to remove outliers 
    #Compute median absolute difference
    y_med = np.median(ydata)
    y_mad = malib.mad(ydata)
    mad_factor = 3
    y_perc = [y_med - y_mad*mad_factor, y_med + y_mad*mad_factor]

    y_idx = ((ydata >= y_perc[0]) & (ydata <= y_perc[1]))
    ydata_clip = ydata[y_idx]
    xdata_clip = xdata[y_idx]

    fit = optimization.curve_fit(nuth_func, xdata_clip, ydata_clip, x0)[0]
    print(fit)
    genplot(xdata_clip, ydata_clip, fit) 
    """
    #Compute robust statistics for 1-degree bins
    nbins = 360
    bin_range = (0., 360.)
    bin_count, bin_edges, bin_centers = malib.bin_stats(xdata, ydata, stat='count', \
            nbins=nbins, bin_range=bin_range)
    bin_med, bin_edges, bin_centers = malib.bin_stats(xdata, ydata, stat='median', \
            nbins=nbins, bin_range=bin_range)

    """
    #Mask bins in grid directions, can potentially contain biased stats
    badbins = [0, 45, 90, 180, 225, 270, 315]
    bin_stat = np.ma.masked_where(np.around(bin_edges[:-1]) % 45 == 0, bin_stat)
    bin_edges = np.ma.masked_where(np.around(bin_edges[:-1]) % 45 == 0, bin_edges)
    """

    #Remove any empty bins
    #idx = ~(np.ma.getmaskarray(bin_med))

    #Remove any bins with only a few points
    min_count = 9
    idx = (bin_count.filled(0) >= min_count) 

    bin_med = bin_med[idx]
    bin_centers = bin_centers[idx]

    fit = optimization.curve_fit(nuth_func, bin_centers, bin_med, x0)[0]
    f = genplot(bin_centers, bin_med, fit, xdata=xdata, ydata=ydata) 
    plt.show()
    #genplot(xdata, ydata, fit) 

    print(fit)
    return fit, f
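genplot is referenced but not included in these snippets. A plausible sketch based on how it is called above and on the plot labels used in Example #3 (an assumption, not the verbatim source):

import numpy as np
import matplotlib.pyplot as plt

def genplot(x, y, fit, xdata=None, ydata=None):
    """Plot binned medians, optional raw samples, and the fitted nuth_func sinusoid"""
    f, ax = plt.subplots()
    if xdata is not None and ydata is not None:
        ax.plot(xdata, ydata, 'k.', ms=1, alpha=0.1, label='Raw samples')
    ax.plot(x, y, 'ro', ms=3, label='Bin medians')
    x_f = np.linspace(0., 360., 1000)
    ax.plot(x_f, nuth_func(x_f, *fit), 'b',
            label=r'$y=%0.2f*cos(%0.2f-x)+%0.2f$' % tuple(fit))
    ax.set_xlabel('Aspect (deg)')
    ax.set_ylabel('dh/tan(slope) (m)')
    ax.legend(prop={'size': 8})
    return f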