Example 1
def get_mask(ds, dem_fn):
    #Mask glaciers, vegetated slopes
    static_mask = dem_mask.get_lulc_mask(ds,
                                         mask_glaciers=True,
                                         filter='not_forest',
                                         bareground_thresh=60)
    #Mask glaciers only
    #static_mask = dem_mask.get_icemask(ds)
    #Top-of-atmosphere reflectance threshold (requires orthoimage and output from toa.sh)
    toa_fn = dem_mask.get_toa_fn(dem_fn)
    if toa_fn is not None:
        toa_ds = warplib.memwarp_multi_fn([toa_fn,], res=ds, extent=ds, \
                t_srs=ds, r='cubicspline')[0]
        toa_mask = dem_mask.get_toa_mask(toa_ds)
        static_mask = np.logical_and(static_mask, toa_mask)
    #Return final mask, ready to be applied
    return ~(static_mask)
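A minimal usage sketch for get_mask() above, assuming the same module context as the snippet (dem_mask, warplib, np and iolib already imported); dem_fn here is a hypothetical input DEM filename:

import os
import numpy as np
from pygeotools.lib import iolib

#Hypothetical input DEM (assumed filename)
dem_fn = 'dem_2m.tif'
ds = iolib.fn_getds(dem_fn)
dem = iolib.ds_getma(ds)
#True values in the returned mask are pixels to exclude (glaciers, forest, low TOA reflectance)
mask = get_mask(ds, dem_fn)
dem_masked = np.ma.array(dem, mask=mask)
iolib.writeGTiff(dem_masked, os.path.splitext(dem_fn)[0]+'_ref_masked.tif', ds)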
Example 2
File: vmap.py Project: whigg/vmap
def main():
    parser = getparser()
    args = parser.parse_args()
    if args.seedmode == 'existing_velocity':
        if args.vx_fn is None or args.vy_fn is None:
            parser.error('"-seedmode existing_velocity" requires "-vx_fn" and "-vy_fn"')

    print('\n%s' % datetime.now())
    print('%s UTC\n' % datetime.utcnow())

    align = args.align
    seedmode = args.seedmode
    spr = args.refinement
    erode = args.erode
    #Correlator tile timeout
    #With proper seeding, correlation should be very fast
    #timeout = 360 
    timeout = 1200 
    threads = args.threads

    kernel = (args.kernel, args.kernel)
    #SGM correlator
    if spr > 3:
        #kernel = (7,7)
        kernel = (11,11)
        erode = 0

    #Smooth the output F.tif 
    smoothF = args.filter 

    res = args.tr
    #Resample input to something easier to work with
    #res = 4.0

    #Open input files
    fn1 = args.fn1
    fn2 = args.fn2 

    if not iolib.fn_check(fn1) or not iolib.fn_check(fn2):
        sys.exit("Unable to locate input files")

    if args.outdir is not None:
        outdir = args.outdir
    else:
        outdir = '%s__%s_vmap_%sm_%ipx_spm%i' % (os.path.splitext(os.path.split(fn1)[1])[0], \
                os.path.splitext(os.path.split(fn2)[1])[0], res, kernel[0], spr)

    #Note, can encounter filename length issues in boost, just use vmap prefix
    outprefix = '%s/vmap' % (outdir)
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    #Check to see if inputs have geolocation and projection information
    ds1 = iolib.fn_getds(fn1)
    ds2 = iolib.fn_getds(fn2)

    if geolib.srs_check(ds1) and geolib.srs_check(ds2):
        ds1_clip_fn = os.path.join(outdir, os.path.splitext(os.path.basename(fn1))[0]+'_warp.tif')
        ds2_clip_fn = os.path.join(outdir, os.path.splitext(os.path.basename(fn2))[0]+'_warp.tif')

        if not os.path.exists(ds1_clip_fn) or not os.path.exists(ds2_clip_fn):
            #This should write out files to new subdir
            ds1_clip, ds2_clip = warplib.diskwarp_multi_fn([fn1, fn2], extent='intersection', res=res, r='average', outdir=outdir)
            ds1_clip = None
            ds2_clip = None
            #However, if inputs have identical extent/res/proj, then link to original files
            if not os.path.exists(ds1_clip_fn):
                os.symlink(os.path.abspath(fn1), ds1_clip_fn)
            if not os.path.exists(ds2_clip_fn):
                os.symlink(os.path.abspath(fn2), ds2_clip_fn)
            align = 'None'

        #Mask support - limit correlation only to rock/ice surfaces, no water/veg
        #This masks input images - guarantee we won't waste time correlating over vegetation
        #TODO: Add support to load arbitrary raster or shp mask
        if args.mask_input:
            ds1_masked_fn = os.path.splitext(ds1_clip_fn)[0]+'_masked.tif'
            ds2_masked_fn = os.path.splitext(ds2_clip_fn)[0]+'_masked.tif'

            if not os.path.exists(ds1_masked_fn) or not os.path.exists(ds2_masked_fn):
                #Load NLCD or bareground mask
                from demcoreg.dem_mask import get_lulc_mask

                ds1_clip = iolib.fn_getds(ds1_clip_fn)
                lulc_mask_fn = os.path.join(outdir, 'lulc_mask.tif')
                #if not os.path.exists(lulc_mask_fn):
                lulc_mask = get_lulc_mask(ds1_clip, mask_glaciers=False, filter='not_forest')
                iolib.writeGTiff(lulc_mask, lulc_mask_fn, ds1_clip) 
                ds1_clip = None

                #Now apply to original images 
                #This could be problematic for huge inputs, see apply_mask.py
                #lulc_mask = lulc_mask.astype(int)
                for fn in (ds1_clip_fn, ds2_clip_fn):
                    ds = iolib.fn_getds(fn)
                    a = iolib.ds_getma(ds)
                    a = np.ma.array(a, mask=~(lulc_mask))
                    if a.count() > 0:
                        out_fn = os.path.splitext(fn)[0]+'_masked.tif'
                        iolib.writeGTiff(a,out_fn,ds)
                        a = None
                    else:
                        sys.exit("No unmasked pixels over bare earth")
            ds1_clip_fn = ds1_masked_fn
            ds2_clip_fn = ds2_masked_fn
    else:
        ds1_clip_fn = fn1
        ds2_clip_fn = fn2
        #Now let user specify alignment methods as option - don't hardcode
        #align = 'Homography'
        #align = 'AffineEpipolar'
    ds1 = None
    ds2 = None

    #Should have extra kwargs option here
    stereo_opt = get_stereo_opt(threads=threads, kernel=kernel, timeout=timeout, \
            erode=erode, spr=spr, align=align)
    
    #Stereo arguments
    #Latest version of ASP should accept tif without camera models
    #stereo_args = [ds1_clip_fn, ds2_clip_fn, outprefix]
    #Nope - still need to provide dummy camera models, and they must be unique files
    #Use the dummy.tsai file bundled in the vmap repo
    dummy_tsai = os.path.join(os.path.split(os.path.realpath(__file__))[0], 'dummy.tsai')
    dummy_tsai2 = os.path.splitext(dummy_tsai)[0]+'2.tsai'
    if not os.path.exists(dummy_tsai2):
        #os.symlink returns None, so keep dummy_tsai2 pointing at the link path
        os.symlink(dummy_tsai, dummy_tsai2)
    stereo_args = [ds1_clip_fn, ds2_clip_fn, dummy_tsai, dummy_tsai2, outprefix]

    #Run stereo_pprc
    if not os.path.exists(outprefix+'-R_sub.tif'):
        run_cmd('stereo_pprc', stereo_opt+stereo_args, msg='0: Preprocessing')
        #Copy proj info to outputs, this should happen automatically now?
        for ext in ('L', 'R', 'L_sub', 'R_sub', 'lMask', 'rMask', 'lMask_sub', 'rMask_sub'):
            geolib.copyproj(ds1_clip_fn, '%s-%s.tif' % (outprefix,ext))

    #Prepare seeding for stereo_corr
    #TODO: these are untested after refactoring
    if not os.path.exists(outprefix+'-D_sub.tif'):
        #Don't need to do anything for default seed-mode 1
        if seedmode == 'sparse_disp':
            #Sparse correlation of full-res images
            stereo_opt.extend(['--corr-seed-mode', '3'])
            sparse_disp_opt = []
            sparse_disp_opt.extend(['--Debug', '--coarse', '512', '--fine', '256', '--no_epipolar_fltr']) 
            sparse_disp_opt.extend(['-P', str(threads)])
            sparse_disp_args = [outprefix+'-L.tif', outprefix+'-R.tif', outprefix]
            run_cmd('sparse_disp', sparse_disp_opt+sparse_disp_args, msg='0.5: D_sub generation')
        elif seedmode == 'existing_velocity':
            #User-input low-res velocity maps for seeding
            #TODO: Add functions that fetch best available velocities for Ant/GrIS or user-defined low-res velocities
            #Automatically query GoLive velocities here
            vx_fn = args.vx_fn 
            vy_fn = args.vy_fn 
            #Check for existence

            #Site-specific HMA seeding (example paths); leave commented out so user-supplied vx_fn/vy_fn are honored
            #vdir = '/nobackup/deshean/rpcdem/hma/velocity_jpl_amaury_2013-2015'
            #vx_fn = os.path.join(vdir, 'PKH_WRS2_B8_2013_2015_snr5_n1_r170_res12.x_vel.TIF')
            #vy_fn = os.path.join(vdir, 'PKH_WRS2_B8_2013_2015_snr5_n1_r170_res12.y_vel.TIF')

            if os.path.exists(vx_fn) and os.path.exists(vy_fn):
                ds1_clip = iolib.fn_getds(ds1_clip_fn)
                ds1_res = geolib.get_res(ds1_clip, square=True)[0]

                #Compute L_sub res - use this for output dimensions
                L_sub_fn = outprefix+'-L_sub.tif' 
                L_sub_ds = gdal.Open(L_sub_fn)
                L_sub_x_scale = float(ds1_clip.RasterXSize) / L_sub_ds.RasterXSize
                L_sub_y_scale = float(ds1_clip.RasterYSize) / L_sub_ds.RasterYSize
                L_sub_scale = np.max([L_sub_x_scale, L_sub_y_scale])
                L_sub_res = ds1_res * L_sub_scale

                #Since we are likely upsampling here, use cubicspline
                vx_ds_clip, vy_ds_clip = warplib.memwarp_multi_fn([vx_fn, vy_fn], extent=ds1_clip, \
                        t_srs=ds1_clip, res=L_sub_res, r='cubicspline')

                ds1_clip = None

                #Get vx and vy arrays
                vx = iolib.ds_getma(vx_ds_clip)
                vy = iolib.ds_getma(vy_ds_clip)

                #Determine time interval between inputs
                #Used to scale the known low-res velocities to expected pixel offsets
                t_factor = get_t_factor_fn(ds1_clip_fn, ds2_clip_fn, ds=vx_ds_clip)

                if t_factor is not None:
                    #Compute expected offset in scaled pixels 
                    dx = (vx*t_factor)/L_sub_res
                    dy = (vy*t_factor)/L_sub_res
                    #Note: Joughin and Rignot's values are positive y up!
                    #ASP is positive y down, so need to multiply these values by -1
                    #dy = -(vy*t_factor)/L_sub_res

                    #Should smooth/fill dx and dy

                    #If the expected search window is small (original note: ~30x30 px), don't seed - just use a fixed search window
                    #search_window_area_thresh = 900
                    search_window_area_thresh = 0 
                    #Use robust percentiles rather than the full min/max range to define the search window
                    #search_window = np.array([dx.min(), dy.min(), dx.max(), dy.max()])
                    dx_p = calcperc(dx, perc=(0.5, 99.5))
                    dy_p = calcperc(dy, perc=(0.5, 99.5))
                    search_window = np.array([dx_p[0], dy_p[0], dx_p[1], dy_p[1]])
                    search_window_area = (search_window[2]-search_window[0]) * (search_window[3]-search_window[1])
                    if search_window_area < search_window_area_thresh:
                        stereo_opt.extend(['--corr-seed-mode', '0'])
                        stereo_opt.append('--corr-search')
                        stereo_opt.extend([str(x) for x in search_window])
                        #pad_perc=0.1
                        #stereo_opt.extend(['--corr-sub-seed-percent', str(pad_perc)])
                    #Otherwise, generate a D_sub map from low-res velocity
                    else:
                        stereo_opt.extend(['--corr-seed-mode', '3'])
                        #This is relative to the D_sub scaled disparities
                        d_sub_fn = L_sub_fn.split('-L_sub')[0]+'-D_sub.tif' 
                        gen_d_sub(d_sub_fn, dx, dy)

    #If the above didn't generate a D_sub.tif for seeding, run stereo_corr to generate a low-res D_sub.tif
    if not os.path.exists(outprefix+'-D_sub.tif'):
        newopt = ['--compute-low-res-disparity-only',]
        run_cmd('stereo_corr', newopt+stereo_opt+stereo_args, msg='1.1: Low-res Correlation')
    #Copy projection info to D_sub
    geolib.copyproj(outprefix+'-L_sub.tif', outprefix+'-D_sub.tif')
      
    #Mask D_sub to limit correlation over bare earth surfaces
    #This _should_ be a better approach than masking input images, but stereo_corr doesn't honor D_sub
    #Still need to mask input images before stereo_pprc
    #Left this in here for reference, or if this changes in ASP
    if False:
        D_sub_ds = gdal.Open(outprefix+'-D_sub.tif', gdal.GA_Update)

        #Mask support - limit correlation only to rock/ice surfaces, no water/veg
        from demcoreg.dem_mask import get_nlcd, mask_nlcd
        nlcd_fn = get_nlcd()
        nlcd_ds = warplib.diskwarp_multi_fn([nlcd_fn,], extent=D_sub_ds, res=D_sub_ds, t_srs=D_sub_ds, r='near', outdir=outdir)[0]
        #validmask = mask_nlcd(nlcd_ds, valid='rock+ice')
        validmask = mask_nlcd(nlcd_ds, valid='not_forest', mask_glaciers=False)
        nlcd_mask_fn = os.path.join(outdir, 'nlcd_validmask.tif')
        iolib.writeGTiff(validmask, nlcd_mask_fn, nlcd_ds) 

        #Now apply to D_sub (band 3 is valid mask)
        #validmask = validmask.astype(int)
        for b in (1,2,3):
            dsub = iolib.ds_getma(D_sub_ds, b)
            dsub = np.ma.array(dsub, mask=~(validmask))
            D_sub_ds.GetRasterBand(b).WriteArray(dsub.filled())
        D_sub_ds = None

    #OK, finally run stereo_corr full-res integer correlation with appropriate seeding
    if not os.path.exists(outprefix+'-D.tif'):
        run_cmd('stereo_corr', stereo_opt+stereo_args, msg='1: Correlation')
        geolib.copyproj(ds1_clip_fn, outprefix+'-D.tif')

    #Run stereo_rfne
    if spr > 0:
        if not os.path.exists(outprefix+'-RD.tif'):
            run_cmd('stereo_rfne', stereo_opt+stereo_args, msg='2: Refinement')
            geolib.copyproj(ds1_clip_fn, outprefix+'-RD.tif')
        d_fn = make_ln(outdir, outprefix, '-RD.tif')
    else:
        ln_fn = outprefix+'-RD.tif'
        if os.path.lexists(ln_fn):
            os.remove(ln_fn)
        os.symlink(os.path.split(outprefix)[1]+'-D.tif', ln_fn)

    #Run stereo_fltr
    if not os.path.exists(outprefix+'-F.tif'):
        run_cmd('stereo_fltr', stereo_opt+stereo_args, msg='3: Filtering')
        geolib.copyproj(ds1_clip_fn, outprefix+'-F.tif')

    d_fn = make_ln(outdir, outprefix, '-F.tif')

    if smoothF and not os.path.exists(outprefix+'-F_smooth.tif'):
        print('Smoothing F.tif')
        from pygeotools.lib import filtlib 
        #Fill holes and smooth F
        F_fill_fn = outprefix+'-F_smooth.tif'
        F_ds = gdal.Open(outprefix+'-F.tif', gdal.GA_ReadOnly)
        #import dem_downsample_fill
        #F_fill_ds = dem_downsample_fill.gdalfill_ds(F_fill_ds)
        print('Creating F_smooth.tif')
        F_fill_ds = iolib.gtif_drv.CreateCopy(F_fill_fn, F_ds, 0, options=iolib.gdal_opt)
        F_ds = None
        for n in (1, 2):
            print('Smoothing band %i' % n)
            b = F_fill_ds.GetRasterBand(n)
            b_fill_bma = iolib.b_getma(b)
            #b_fill_bma = iolib.b_getma(dem_downsample_fill.gdalfill(b))
            #Filter extreme values (careful, could lose areas of valid data with fastest v)
            #b_fill_bma = filtlib.perc_fltr(b_fill_bma, perc=(0.01, 99.99))
            #These filters remove extreme values and fill data gaps
            #b_fill_bma = filtlib.median_fltr_skimage(b_fill_bma, radius=7, erode=0)
            #b_fill_bma = filtlib.median_fltr(b_fill_bma, fsize=7, origmask=True)
            #Gaussian filter
            b_fill_bma = filtlib.gauss_fltr_astropy(b_fill_bma, size=9)
            b.WriteArray(b_fill_bma)
        F_fill_ds = None
        d_fn = make_ln(outdir, outprefix, '-F_smooth.tif')

    print('\n%s' % datetime.now())
    print('%s UTC\n' % datetime.utcnow())

    #If time interval is specified, convert pixel displacements to rates
    if args.dt != 'none':
        #Check if vm.tif already exists
        #Should probably just overwrite by default
        #if os.path.exists(os.path.splitext(d_fn)[0]+'_vm.tif'):
        #    print("\nFound existing velocity magnitude map!\n")
        #else:
        #Generate output velocity products and figure
        #Requires that vmap repo is in PATH
        cmd = ['disp2v.py', d_fn]
        #Note: this will attempt to automatically determine control surfaces
        #disp2v.py will accept arbitrary mask, could pass through here
        if args.remove_offsets:
            cmd.append('-remove_offsets')
        cmd.extend(['-dt', args.dt])
        print("Converting disparities to velocities")
        print(cmd)
        subprocess.call(cmd)
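For reference, a quick sketch of the default output directory name produced by the outdir logic above; the input filenames and parameter values are made up for illustration:

import os

#Hypothetical inputs, matching the '%s__%s_vmap_%sm_%ipx_spm%i' pattern used above
fn1, fn2 = 'WV01_20150612_ortho.tif', 'WV02_20160529_ortho.tif'
res, kernel, spr = 15.0, (35, 35), 1
outdir = '%s__%s_vmap_%sm_%ipx_spm%i' % (os.path.splitext(os.path.split(fn1)[1])[0], \
        os.path.splitext(os.path.split(fn2)[1])[0], res, kernel[0], spr)
print(outdir)  #WV01_20150612_ortho__WV02_20160529_ortho_vmap_15.0m_35px_spm1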
Example 3
File: disp2v.py Project: whigg/vmap
def main():
    parser = getparser()
    args = parser.parse_args()

    t_unit = args.dt
    plot = args.plot
    remove_offsets = args.remove_offsets
    mask_fn = args.mask_fn
    if mask_fn is not None:
        remove_offsets = True

    #Input is 3-band disparity map, extract bands directly
    src_fn = args.disp_fn
    if not iolib.fn_check(src_fn):
        sys.exit("Unable to locate input file: %s" % src_fn)

    src_ds = iolib.fn_getds(src_fn)
    if src_ds.RasterCount != 3:
        sys.exit("Input file must be ASP disparity map (3 bands: x, y, mask)")
    #Extract pixel resolution
    h_res, v_res = geolib.get_res(src_ds)

    #Horizontal scale factor
    #If running on disparity_view output (gdal_translate -outsize 5% 5% F.tif F_5.tif)
    #h_res /= 20
    #v_res /= 20

    #Load horizontal and vertical disparities
    h = iolib.ds_getma(src_ds, bnum=1)
    v = iolib.ds_getma(src_ds, bnum=2)

    #ASP output has northward motion as negative values in band 2
    v *= -1

    t1, t2 = timelib.fn_getdatetime_list(src_fn)
    dt = t2 - t1
    #Default t_factor is the time interval in decimal years
    t_factor = timelib.get_t_factor(t1, t2)

    #Input timestamp arrays if inputs are mosaics
    if False:
        t1_fn = ''
        t2_fn = ''
        if os.path.exists(t1_fn) and os.path.exists(t2_fn):
            t_factor = timelib.get_t_factor_fn(t1_fn, t2_fn)
        if t_factor is None:
            sys.exit("Unable to determine input timestamps")

    if t_unit == 'day':
        t_factor *= 365.25

    print("Input dates:")
    print(t1)
    print(t2)
    print(dt)
    print(t_factor, t_unit)

    #Scale values for polar stereographic distortion
    srs = geolib.get_ds_srs(src_ds)
    proj_scale_factor = 1.0
    #Want to scale to get correct distances for polar stereographic
    if srs.IsSame(geolib.nps_srs) or srs.IsSame(geolib.sps_srs):
        proj_scale_factor = geolib.scale_ps_ds(src_ds)

    #Convert disparity values in pixels to m/t_unit
    h_myr = h * h_res * proj_scale_factor / t_factor
    h = None
    v_myr = v * v_res * proj_scale_factor / t_factor
    v = None

    #Velocity Magnitude
    m = np.ma.sqrt(h_myr**2 + v_myr**2)
    print("Velocity Magnitude stats")
    malib.print_stats(m)

    #Remove x and y offsets over control surfaces
    offset_str = ''
    if remove_offsets:
        if mask_fn is None:
            from demcoreg.dem_mask import get_lulc_mask
            print("\nUsing demcoreg to prepare mask of stable control surfaces\n")
            mask = get_lulc_mask(src_ds, mask_glaciers=True, filter='rock+ice+water')
        else:
            #Vector mask - untested
            if os.path.splitext(mask_fn)[1] == '.shp':
                mask = geolib.shp2array(mask_fn, src_ds)
            else:
                print("\nWarping input raster mask")
                #This can be from a previous dem_mask.py run (e.g. *rockmask.tif)
                mask_ds = warplib.memwarp_multi_fn([mask_fn,], res=src_ds, extent=src_ds, t_srs=src_ds)[0]
                mask = iolib.ds_getma(mask_ds)
                #The default from ds_getma is a masked array, so need to isolate the boolean mask
                #Assume input is 0 for masked, 1 for unmasked (valid control surface)
                mask = mask.filled().astype('bool')
                #This should work, as the *rockmask.tif is 1 for unmasked, 0 for masked, with ndv=0
                #mask = np.ma.getmaskarray(mask)

        print("\nRemoving median x and y offset over static control surfaces")
        h_myr_count = h_myr.count()
        h_myr_static_count = h_myr[mask].count()
        h_myr_med = malib.fast_median(h_myr[mask])
        v_myr_med = malib.fast_median(v_myr[mask])
        h_myr_mad = malib.mad(h_myr[mask])
        v_myr_mad = malib.mad(v_myr[mask])
        print("Static pixel count: %i (%0.1f%%)" %
              (h_myr_static_count,
               100 * float(h_myr_static_count) / h_myr_count))
        print("median (+/-NMAD)")
        print("x velocity offset: %0.2f (+/-%0.2f) m/%s" %
              (h_myr_med, h_myr_mad, t_unit))
        print("y velocity offset: %0.2f (+/-%0.2f) m/%s" %
              (v_myr_med, v_myr_mad, t_unit))
        h_myr -= h_myr_med
        v_myr -= v_myr_med
        offset_str = '_offsetcorr_h%0.2f_v%0.2f' % (h_myr_med, v_myr_med)
        #Velocity Magnitude
        m = np.ma.sqrt(h_myr**2 + v_myr**2)
        print("Velocity Magnitude stats after correction")
        malib.print_stats(m)

    if plot:
        fig_fn = os.path.splitext(src_fn)[0] + '.png'
        label = 'Velocity (m/%s)' % t_unit
        f, ax = make_plot(m, fig_fn, label)
        plotvec(h_myr, v_myr)
        plt.tight_layout()
        plt.savefig(fig_fn,
                    dpi=300,
                    bbox_inches='tight',
                    pad_inches=0,
                    edgecolor='none')

    print("Writing out files")
    gt = src_ds.GetGeoTransform()
    proj = src_ds.GetProjection()
    dst_fn = os.path.splitext(src_fn)[0] + '_vm%s.tif' % offset_str
    iolib.writeGTiff(m, dst_fn, create=True, gt=gt, proj=proj)
    dst_fn = os.path.splitext(src_fn)[0] + '_vx%s.tif' % offset_str
    iolib.writeGTiff(h_myr, dst_fn, create=True, gt=gt, proj=proj)
    dst_fn = os.path.splitext(src_fn)[0] + '_vy%s.tif' % offset_str
    iolib.writeGTiff(v_myr, dst_fn, create=True, gt=gt, proj=proj)
    src_ds = None
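A small arithmetic sanity check on the disparity-to-velocity conversion used above (disparity in pixels, times pixel size and projection scale factor, divided by the time interval); all numbers here are hypothetical:

#Hypothetical values for illustration only
h_px = 2.5                 #x disparity in pixels
h_res = 15.0               #pixel size in meters
proj_scale_factor = 1.0    #no polar stereographic correction
t_factor = 0.97            #time interval between acquisitions in decimal years
h_myr = h_px * h_res * proj_scale_factor / t_factor
print('%0.1f m/yr' % h_myr)  #~38.7 m/yr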