Example #1
def main():
    parser = getparser()
    args = parser.parse_args()

    ras_fn = args.ras_fn
    min = args.min
    max = args.max

    print("Loading dz raster into masked array")
    ras_ds = iolib.fn_getds(ras_fn)
    ras = iolib.ds_getma(ras_ds, 1)
    #Cast input ma as float32 so np.nan filling works
    ras = ras.astype(np.float32)
    ras_fltr = ras

    #Absolute range filter
    ras_fltr = filtlib.range_fltr(ras_fltr, (min, max))

    if args.stats:
        print("Input dz raster stats:")
        malib.print_stats(ras)
        print("Filtered dz raster stats:")
        malib.print_stats(ras_fltr)

    #Output filename will have 'filt' appended
    dst_fn = os.path.splitext(ras_fn)[0] + '_filt.tif'

    print("Writing out filtered dz raster: %s" % dst_fn)
    #Note: writeGTiff writes ras_fltr.filled()
    iolib.writeGTiff(ras_fltr, dst_fn, ras_ds)
Example #2
def get_filtered_slope(ds, slope_lim=(0.1, 40)):
    #Generate slope map
    print("Computing slope")
    slope = geolib.gdaldem_mem_ds(ds, processing='slope', returnma=True, computeEdges=False)
    #slope_stats = malib.print_stats(slope)
    print("Slope filter: %0.2f - %0.2f" % slope_lim)
    print("Initial count: %i" % slope.count()) 
    slope = filtlib.range_fltr(slope, slope_lim) 
    print("Filtered count: %i" % slope.count())
    return slope
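
The helper above returns the slope grid with out-of-range pixels masked, but no example call. A minimal usage sketch follows (assumptions: pygeotools is installed, the geolib/filtlib modules used by the function are already imported, and 'dem.tif' / 'dem_slopefilt.tif' are placeholder filenames, not part of the original script):

import numpy as np
from pygeotools.lib import iolib

# Hypothetical single-band DEM; any GDAL-readable raster should work
dem_fn = 'dem.tif'
ds = iolib.fn_getds(dem_fn)
dem = iolib.ds_getma(ds, 1)

# Pixels outside the 0.1-40 degree slope range come back masked
slope = get_filtered_slope(ds, slope_lim=(0.1, 40))

# Propagate the slope mask to the elevation values and write out
dem_masked = np.ma.array(dem, mask=np.ma.getmaskarray(slope))
iolib.writeGTiff(dem_masked, 'dem_slopefilt.tif', ds)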
Example #3
def main():
    parser = getparser()
    args = parser.parse_args()

    fn = args.fn
    if not iolib.fn_check(fn):
        sys.exit("Unable to locate input file: %s" % fn)

    #Need some checks on these
    param = args.param

    print("Loading input raster into masked array")
    ds = iolib.fn_getds(fn)
    #Currently supports only single band operations
    r = iolib.ds_getma(ds, 1)

    #May need to cast input ma as float32 so np.nan filling works
    #r = r.astype(np.float32)
    #Want function that checks and returns float32 if necessary
    #Should filter, then return original dtype

    r_fltr = r

    #Loop through all specified input filters
    #for filt in args.filt:
    filt = args.filt[0]

    if len(param) == 1:
        param = param[0]
    param_str = ''

    if filt == 'range':
        #Range filter
        param = [float(i) for i in param]
        r_fltr = filtlib.range_fltr(r_fltr, param)
        param_str = '_{0:0.2f}-{1:0.2f}'.format(*param)
    elif filt == 'absrange':
        #Range filter of absolute values
        param = [float(i) for i in param]
        r_fltr = filtlib.absrange_fltr(r_fltr, param)
        param_str = '_{0:0.2f}-{1:0.2f}'.format(*param)
    elif filt == 'perc':
        #Percentile filter
        param = [float(i) for i in param]
        r_fltr = filtlib.perc_fltr(r, perc=param)
        param_str = '_{0:0.2f}-{1:0.2f}'.format(*param)
    elif filt == 'med':
        #Median filter
        param = int(param)
        r_fltr = filtlib.rolling_fltr(r_fltr, f=np.nanmedian, size=param)
        #r_fltr = filtlib.median_fltr(r_fltr, fsize=param, origmask=True)
        #r_fltr = filtlib.median_fltr_skimage(r_fltr, radius=4, origmask=True)
        param_str = '_%ipx' % param
    elif filt == 'gauss':
        #Gaussian filter (default)
        param = int(param)
        r_fltr = filtlib.gauss_fltr_astropy(r_fltr,
                                            size=param,
                                            origmask=False,
                                            fill_interior=False)
        param_str = '_%ipx' % param
    elif filt == 'highpass':
        #High pass filter
        param = int(param)
        r_fltr = filtlib.highpass(r_fltr, size=param)
        param_str = '_%ipx' % param
    elif filt == 'sigma':
        #n*sigma filter, remove outliers
        param = int(param)
        r_fltr = filtlib.sigma_fltr(r_fltr, n=param)
        param_str = '_n%i' % param
    elif filt == 'mad':
        #n*mad filter, remove outliers
        #Maybe better to use a percentile filter
        param = int(param)
        r_fltr = filtlib.mad_fltr(r_fltr, n=param)
        param_str = '_n%i' % param
    elif filt == 'dz':
        #Difference filter, need to specify ref_fn and range
        #Could let the user compute their own dz, then just run a standard range or absrange filter
        ref_fn = param[0]
        ref_ds = warplib.memwarp_multi_fn([ref_fn,], res=ds, extent=ds, t_srs=ds)[0]
        ref = iolib.ds_getma(ref_ds)
        param = [float(i) for i in param[1:]]
        r_fltr = filtlib.dz_fltr_ma(r, ref, rangelim=param)
        #param_str = '_{0:0.2f}-{1:0.2f}'.format(*param)
        param_str = '_{0:0.0f}_{1:0.0f}'.format(*param)
    else:
        sys.exit("No filter type specified")

    #Compute and print stats before/after
    if args.stats:
        print("Input stats:")
        malib.print_stats(r)
        print("Filtered stats:")
        malib.print_stats(r_fltr)

    #Write out
    dst_fn = os.path.splitext(fn)[0] + '_%sfilt%s.tif' % (filt, param_str)
    if args.outdir is not None:
        outdir = args.outdir
        if not os.path.exists(outdir):
            os.makedirs(outdir)
        dst_fn = os.path.join(outdir, os.path.split(dst_fn)[-1])
    print("Writing out filtered raster: %s" % dst_fn)
    iolib.writeGTiff(r_fltr, dst_fn, ds)
Example #4
def main():
    parser = getparser()
    args = parser.parse_args()

    ras_fn = args.ras_fn
    out_name = args.out_name
    pre_min = args.pre_min
    pre_max = args.pre_max
    n_gaus = args.n_gaus
    sample_step = args.sample_step
    ##ht_thresh = args.ht_thresh
    stddev_shift = args.shift

    driverTiff = gdal.GetDriverByName('GTiff')
    print('\n\tCHM Correction')
    print('\tRaster name: %s' % ras_fn)

    # [4] Read in raster as a masked array
    array = iolib.fn_getma(ras_fn, bnum=1)
    array = array.astype(np.float32)

    # TODO: fix Slope filter
    # Get hi-sun elev warp-trans-ref-DEM
    #tail_str = "-DEM_warp-trans_reference-DEM"
    #chm_dir, chm_pairname = os.path.split(ras_fn)       # eg chm_pairname WV02_20130804_1030010024808A00_1030010025118000-DEM_warp-trans_reference-DEM_WV01_20150726_1020010043A37200_1020010040698700-DEM_warp-trans_reference-DEM_dz_eul.tif
    #main_dir = os.path.split(chm_dir)[0]
    #diff_pairs = chm_pairname.replace(tail_str,"").replace("_dz_eul.tif","")
    #hi_sun_dem_fn = os.path.join(main_dir,diff_pairs,chm_pairname.split(tail_str)[0] + "-DEM_warp_align",chm_pairname.split(tail_str)[0] + tail_str + ".tif")

    #array = slope_fltr_chm(array, hi_sun_dem_fn)

    # TODO: incidence angle correction of heights

    #Absolute range filter
    # returns a masked array...
    array = filtlib.range_fltr(array, (pre_min, pre_max))

    # Get gaussian peaks
    out_gaus_csv = get_hist_n(array, ras_fn, n_gaus, sample_step)

    with open(out_gaus_csv, 'r') as peaksCSV:
        """Create a canopy height model
            Shift the values of the dz raster based on the ground peak identified in the histogram
                Read in CSV of gaussian peaks computed from the dz raster
                Apply a shift based on the minimum peak and the stddev_shift
                Returns a tif of canopy heights.
        """
        hdr = peaksCSV.readline()
        line = peaksCSV.readline()

    # Get raster diff dsm name
    #ras_fn    = line.split(',')[0]

    # Get the min of the means: represents the offset value that will be subtracted from each pixel of the corresponding diff_dsm
    gmeans = [float(i) for i in line.split(',')[1::2]]

    # Find the min of the gaussian peak means
    gmin = min(gmeans)

    # Get corresponding sd
    idx = line.split(',').index(str(gmin)) + 1
    gsd = float(line.split(',')[idx])

    ##array = np.where(array <= -99, np.nan, array)
    gsd_str = "%04d" % (round(gsd, 2) * 100)
    print('\n\tApply CHM gaussian correction:')
    print('\tHeight of the ground peak (m) (gaussian min):  %s' % gmin)
    print('\tEstimated height uncertainty (m) (gaussian std dev):  %s' % gsd)
    print('\tNumber of std devs used in calculating shift:  %s' % stddev_shift)

    shift_val = float(np.subtract(gmin, (stddev_shift * gsd)))

    print "\t: Final CHM correction value (shift) (m) %s" % shift_val
    array = np.subtract(array, shift_val)

    print('\n\tApply masking')
    print('\t\tConvert values below 0 like this:')
    print('\t\t  np.ma.where(array < (0 - 6 * gsd) , 0, abs(array))')
    # Better handling of negative values?
    #   1. take abs value of all negative values?
    #   2. take abs value of all negative values within 1 stddev of ground peak; all the rest convert to 0
    array = np.ma.where(array < (0 - 6 * gsd), 0, abs(array))

    #fn_tail = '_chm_'+gsd_str+'.tif'
    fn_tail = '_chm.tif'
    if out_name is not None:
        chm_fn = os.path.join(os.path.split(ras_fn)[0], out_name + fn_tail)
    else:
        chm_fn = os.path.splitext(ras_fn)[0] + fn_tail

    # Write array to dataset
    print "\n\t----------------------"
    print "\n\tMaking CHM GeoTiff: ", chm_fn

    iolib.writeGTiff(array, chm_fn, iolib.fn_getds(ras_fn), ndv=-99)

    cmdStr = "gdaladdo -ro -r nearest " + chm_fn + " 2 4 8 16 32 64"
    run_wait_os(cmdStr)

    # Append to a dir level CSV file that holds the uncertainty info for each CHM (gmin, gsd, stddev_shift)
    out_dir = os.path.split(ras_fn)[0]
    out_stats_csv = out_dir + '_stats.csv'
    print "\tAppending stats to %s" % (out_stats_csv)

    if not os.path.exists(out_stats_csv):
        writetype = 'w'  # write new file if it does not yet exist
    else:
        writetype = 'a'  # append a line if it exists

    with open(out_stats_csv, writetype, newline='') as out_stats:
        wr = csv.writer(out_stats, delimiter=",")
        if writetype == 'w':
            wr.writerow([
                "chm_name", "ground_peak_mean_m", "ground_peak_stdev_m",
                "num_stdevs_shift", "final_chm_peak_shift_m"
            ])  # if new file, write header
        wr.writerow([
            os.path.split(chm_fn)[1],
            str(round(gmin, 2)),
            str(round(gsd, 2)),
            str(round(stddev_shift, 2)),
            str(round(shift_val, 2))
        ])

    print "\tFinished chm_correct.py"
Example #5
    trend_filt = filtlib.gauss_fltr_astropy(trend_filt, size=size)
    print("Output pixel count: %s" % trend_filt.count())

    trend_fn = out_fn + '_trend_%spx_filt.tif' % size
    print("Writing out: %s" % trend_fn)
    iolib.writeGTiff(trend_filt*365.25, trend_fn, trend_ds)

    #Update intercept using new filtered slope values
    #Need to update for different periods?
    #dt_pivot = timelib.mean_date(datetime(2000,5,31), datetime(2009,5,31))
    #dt_pivot = timelib.mean_date(datetime(2009,5,31), datetime(2018,5,31))
    dt_pivot = timelib.dt2o(datetime(2009, 5, 31))
    intercept_filt = dt_pivot * (trend - trend_filt) + intercept
    intercept_fn = out_fn + '_intercept_%spx_filt.tif' % size
    print("Writing out: %s" % intercept_fn)
    iolib.writeGTiff(intercept_filt*365.25, intercept_fn, trend_ds)

    trend = trend_filt
    intercept = intercept_filt

for dt in dt_list:
    dt_o = timelib.dt2o(dt)
    z = trend*dt_o+intercept
    #Remove any values outside global limits
    #Could also do local range filter here
    z = filtlib.range_fltr(z, zlim)
    print("Output pixel count: %s" % z.count())
    interp_fn = os.path.splitext(trend_fn)[0] + '_%s.tif' % dt.strftime('%Y%m%d')
    print("Writing out: %s" % interp_fn)
    iolib.writeGTiff(z, interp_fn, trend_ds)