Example #1
def main():
    parser = getparser()
    #Create dictionary of arguments
    args = vars(parser.parse_args())
    
    #Want to enable -full when -of is specified; there is probably a cleaner way to do this with argparse
    if args['of']:
        args['full'] = True
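    #One possible alternative (untested sketch): register a custom argparse.Action for '-of'
    #whose __call__ also sets namespace.full = True, so this manual override is not needed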

    #Note, imshow has many interpolation types:
    #'none', 'nearest', 'bilinear', 'bicubic', 'spline16', 'spline36', 'hanning', 'hamming', 
    #'hermite', 'kaiser', 'quadric', 'catrom', 'gaussian', 'bessel', 'mitchell', 'sinc', 'lanczos'
    #{'interpolation':'bicubic', 'aspect':'auto'}
    #args['imshow_kwargs']={'interpolation':'bicubic'}
    args['imshow_kwargs']={'interpolation':'none'}

    if args['clipped'] and args['overlay'] is None:
        sys.exit("Must specify an overlay filename with option 'clipped'")

    #Set this as the background numpy array
    args['bg'] = None

    if args['shp'] is not None:
        print(args['shp'])

    if args['link']:
        fig = plt.figure(0)
        n_ax = len(args['filelist'])
        src_ds_list = [gdal.Open(fn) for fn in args['filelist']]
        t_srs = geolib.get_ds_srs(src_ds_list[0])
        res_stats = geolib.get_res_stats(src_ds_list, t_srs=t_srs)
        #Use min res
        res = res_stats[0]
        extent = geolib.ds_geom_union_extent(src_ds_list, t_srs=t_srs)
        #print res, extent

    for n,fn in enumerate(args['filelist']):

        if not iolib.fn_check(fn):
            print('Unable to open input file: %s' % fn)
            continue

        #Note: this won't work if img1 has 1 band and img2 has 3 bands
        #Hack for now
        if not args['link']:
            fig = plt.figure(n)
            n_ax = 1
        
        #fig.set_facecolor('black')
        fig.set_facecolor('white')
        fig.canvas.set_window_title(os.path.split(fn)[1])
        #fig.suptitle(os.path.split(fn)[1], fontsize=10)

        #Note: warplib should internally check whether extent/resolution/projection are identical,
        #which would eliminate the need for a clipped flag when the user has already warped
        #the background and source data
        if args['overlay']:
            if args['clipped']: 
                src_ds = gdal.Open(fn, gdal.GA_ReadOnly)
                #Only load up the bg array once
                if args['bg'] is None:
                    #Need to check that background fn exists
                    print("%s background" % args['overlay'])
                    bg_ds = gdal.Open(args['overlay'], gdal.GA_ReadOnly)
                    #Check image dimensions
                    args['bg'] = get_bma(bg_ds, 1, args['full'])
            else:
                #Clip/warp background dataset to match overlay dataset 
                #src_ds, bg_ds = warplib.memwarp_multi_fn([fn, args['overlay']], extent='union')
                src_ds, bg_ds = warplib.memwarp_multi_fn([fn, args['overlay']], extent='first')
                #src_ds, bg_ds = warplib.memwarp_multi_fn([fn, args['overlay']], res='min', extent='first')
                #Want to load up the unique bg array for each input
                args['bg'] = get_bma(bg_ds, 1, args['full'])
        else:
            src_ds = gdal.Open(fn, gdal.GA_ReadOnly)
            if args['link']:
                #Not sure why, but this still warps all linked ds, even when identical res/extent/srs
                #src_ds = warplib.warp(src_ds, res=res, extent=extent, t_srs=t_srs)
                src_ds = warplib.memwarp_multi([src_ds,], res=res, extent=extent, t_srs=t_srs)[0]

        cbar_kwargs={'extend':'both', 'orientation':'vertical', 'shrink':0.7, 'fraction':0.12, 'pad':0.02}

        nbands = src_ds.RasterCount
        b = src_ds.GetRasterBand(1)
        dt = gdal.GetDataTypeName(b.DataType)
        #Eventually, check dt of each band
        print()
        print("%s (%i bands)" % (fn, nbands))
        #Singleband raster
        if (nbands == 1):
            if args['cmap'] is None:
                #Special case to handle ASP float32 grayscale data
                if '-L_sub' in fn or '-R_sub' in fn:
                    args['cmap'] = 'gray'
                else:
                    if (dt == 'Float64') or (dt == 'Float32') or (dt == 'Int32'):
                        args['cmap'] = 'cpt_rainbow'
                    #This is for WV images
                    elif (dt == 'UInt16'):
                        args['cmap'] = 'gray'
                    elif (dt == 'Byte'):
                        args['cmap'] = 'gray'
                    else:
                        args['cmap'] = 'cpt_rainbow'
                """
                if 'count' in fn:
                    args['clim_perc'] = (0,100)
                    cbar_kwargs['extend'] = 'neither'
                    args['cmap'] = 'cpt_rainbow'
                if 'mask' in fn:
                    args['clim'] = (0, 1)
                    #Could be (0, 255)
                    #args['clim_perc'] = (0,100)
                    #Want absolute clim of 0, then perc of 100
                    cbar_kwargs['extend'] = 'neither'
                    args['cmap'] = 'gray'
                """
            args['cbar_kwargs'] = cbar_kwargs
            bma = get_bma(src_ds, 1, args['full'])   
            #Note: n+1 ensures the subplot index is assigned correctly (n is 0-indexed, subplots are 1-indexed)
            bma_fig(fig, bma, n_subplt=n_ax, subplt=n+1, ds=src_ds, **args)
        #3-band raster, likely disparity map
        #This doesn't work when alpha band is present
        elif (nbands == 3) and (dt == 'Byte'):
            #For some reason, tifs are vertically flipped
            if (os.path.splitext(fn)[1] == '.tif'):
                args['imshow_kwargs']['origin'] = 'lower'
            #Use gdal dataset here instead of imread(fn)?
            imgplot = plt.imshow(plt.imread(fn), **args['imshow_kwargs'])
            pltlib.hide_ticks(imgplot.axes)
        #Handle the 3-band disparity map case here
        #elif ((dt == 'Float32') or (dt == 'Int32')):
        else: 
            if args['cmap'] is None:
                args['cmap'] = 'cpt_rainbow'
            bn = 1
            while bn <= nbands:
                bma = get_bma(src_ds, bn, args['full'])
                bma_fig(fig, bma, n_subplt=nbands, subplt=bn, ds=src_ds, **args)
                bn += 1
        #Want to be better about this else case - lazy for now
        #else:
        #    bma = get_bma(src_ds, 1, args['full'])
        #    bma_fig(fig, bma, **args)

        ts = timelib.fn_getdatetime_list(fn) 

        if ts:
            print("Timestamp list: ", ts)

        """
        if len(ts) == 1:
            plt.title(ts[0].date())
        elif len(ts) == 2:
            plt.title("%s to %s" % (ts[0].date(), ts[1].date()))
        """
            
        plt.tight_layout()
        
        #Write out the file 
        #Note: make sure display is local for savefig
        if args['of']:
            outf = str(os.path.splitext(fn)[0])+'_fig.'+args['of'] 
            #outf = str(os.path.splitext(fn)[0])+'_'+str(os.path.splitext(args['overlay'])[0])+'_fig.'+args['of'] 

            #Note: need to account for colorbar (12%) and title - some percentage of axes beyond bma dimensions
            #Should specify minimum text size for output

            max_size = np.array((10.0,10.0))
            max_dpi = 300.0
            #If both outsize and dpi are specified, don't try to change, just make the figure
            if (args['outsize'] is None) and (args['dpi'] is None):
                args['dpi'] = 150.0

            #Unspecified out figure size for a given dpi
            if (args['outsize'] is None) and (args['dpi'] is not None):
                args['outsize'] = np.array(bma.shape[::-1])/args['dpi']
                if np.any(np.array(args['outsize']) > max_size):
                    args['outsize'] = max_size
            #Specified output figure size, no specified dpi 
            elif (args['outsize'] is not None) and (args['dpi'] is None):
                args['dpi'] = np.min([np.max(np.array(bma.shape[::-1])/np.array(args['outsize'])), max_dpi])
                
            print()
            print("Saving output figure:")
            print("Filename: ", outf)
            print("Size (in): ", args['outsize'])
            print("DPI (px/in): ", args['dpi'])
            print("Input dimensions (px): ", bma.shape[::-1])
            print("Output dimensions (px): ", tuple(np.array(args['outsize'])*args['dpi']))
            print()

            fig.set_size_inches(args['outsize'])
            #fig.set_size_inches(54.427, 71.87)
            #fig.set_size_inches(40, 87)
            fig.savefig(outf, dpi=args['dpi'], bbox_inches='tight', pad_inches=0, facecolor=fig.get_facecolor(), edgecolor='none')
    #Show the plot - want to show all at once
    if not args['of']: 
        plt.show()
Example #2
    #z2_fn = '/nobackup/deshean/hma/hma1_2016dec22/hma_8m_tile/hma_8m.vrt'
    #z2_fn = os.path.join(topdir,'hma/hma1_2016dec22/hma_8m_tile/hma_8m.vrt')
    #z2_fn = os.path.join(topdir,'hma/hma1_2016dec22/hma_8m_tile_round2_20170220/hma_8m_round2.vrt')
    #z2_fn = os.path.join(topdir,'hma/hma_8m_mos_20170410/hma_8m.vrt')
    z2_fn = os.path.join(topdir,
                         'hma/mos/%s/mos_8m/%s_8m.vrt' % (mosdir, mosdir))
    #z2_date = 2015.0
    z2_date = datetime(2015, 1, 1)
elif site == 'other':
    outdir = os.path.join(topdir, 'mb')
    aea_srs = geolib.conus_aea_srs
    #glac_shp_fn = '/Users/dshean/data/conus_glacierpoly_24k/rainier_24k_1970-2015_mb_aea.shp'
    #glac_shp_fn = '/Users/dshean/data/conus_glacierpoly_24k/conus_glacierpoly_24k_aea.shp'
    glac_shp_fn = '/Users/dshean/data/conus_glacierpoly_24k/conus_glacierpoly_24k_32610_scg_2008_aea.shp'
    z1_fn = sys.argv[1]
    z1_date = timelib.mean_date(timelib.fn_getdatetime_list(z1_fn))
    z2_fn = sys.argv[2]
    z2_date = timelib.mean_date(timelib.fn_getdatetime_list(z2_fn))
else:
    sys.exit()

ts = datetime.now().strftime('%Y%m%d_%H%M')
out_fn = '%s_mb_%s.csv' % (site, ts)
out_fn = os.path.join(outdir, out_fn)

#Write out a temporary file line by line, in case the process is interrupted
import csv
f = open(os.path.splitext(out_fn)[0] + '_temp.csv', 'wb')
writer = csv.writer(f)

#List to hold output
Example #3
import numpy as np
from pygeotools.lib import timelib

"""
Create clean filenames with center date for velocity maps generated from WV DEMs
"""

#Rename all
#mkdir vm
#for i in */2*vm.tif; do ln -s ../$i vm/$(~/src/vmap/vmap/wv_cdate.py $i)_vm.tif; done
#parallel "fn=$(~/src/vmap/vmap/wv_cdate.py {}); ln -s ../{} vm/${fn}_vm.tif" ::: */2*vm.tif

import os
import sys

fn = sys.argv[1]
dt_list = timelib.fn_getdatetime_list(fn)
dt_list = np.sort(dt_list)
dtmin = dt_list.min()
dtmax = dt_list.max()
c_date = timelib.mean_date(dt_list)
#ndays = timelib.dt_ptp(dt_list)
ndays = timelib.dt_ptp((dtmin, dtmax))
nyears = ndays/365.25
#s = '%s_%04idays_%s-%s' % (c_date.strftime('%Y%m%d'), ndays, dtmin.strftime('%Y%m%d'), dtmax.strftime('%Y%m%d'))
#s = '%s_%s-%s_%0.2fyr' % (c_date.strftime('%Y%m%d'), dtmin.strftime('%Y%m%d'), dtmax.strftime('%Y%m%d'), nyears)
#s = '%s_%s-%s_%04iday' % (c_date.strftime('%Y%m%d'), dtmin.strftime('%Y%m%d'), dtmax.strftime('%Y%m%d'), ndays)
#Added %H%M here, as some inputs were acquired on the same day
s = '%s__%s-%s__%04iday' % (c_date.strftime('%Y%m%d_%H%M'), dtmin.strftime('%Y%m%d_%H%M'), dtmax.strftime('%Y%m%d_%H%M'), ndays)
print(s)

#outdir="vm"
Example #4
def main():

    if len(sys.argv) != 2:
        sys.exit("Usage: %s dz.tif" % os.path.basename(sys.argv[0]))

    #This is mean density for N Cascades snow
    #rho = 0.5
    #Density of pure ice
    rho = 0.917

    #Clip negative values to 0
    filt = False

    src_fn = sys.argv[1]
    src_ds = iolib.fn_getds(src_fn)
    res = geolib.get_res(src_ds, square=True)[0]
    bma = iolib.ds_getma(src_ds)

    #Attempt to extract t1 and t2 from input filename
    ts = timelib.fn_getdatetime_list(src_fn)
    #Hardcode timestamps
    #ts = [datetime.datetime(2013,9,10), datetime.datetime(2014,5,14)]

    dt_yr = None
    if len(ts) == 2:
        dt = ts[1] - ts[0]
        year = datetime.timedelta(days=365.25)
        dt_yr = dt.total_seconds() / year.total_seconds()

    #Can add filter here to remove outliers, perc_fltr(0.01, 99.9)
    if filt:
        mask = np.ma.getmaskarray(bma)
        bma[bma < 0] = 0
        bma = np.ma.array(bma, mask=mask)

    #Print out stats
    print('\n')
    stats = malib.print_stats(bma)

    count = stats[0]
    area = res**2 * count
    mean = stats[3]
    med = stats[5]

    s_m3 = np.ma.sum(bma) * res**2
    s_km3 = s_m3 / 1E9
    s_mwe = mean * rho
    s_gt = s_km3 * rho
    s_mm = s_gt / 374
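    #Note: a later revision of this script (Example #7 below) uses ~360 Gt per mm of sea-level equivalent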

    if dt_yr is not None:
        print("%s to %s: %0.2f yr" % (ts[0], ts[1], dt_yr))
        print("%0.0f m^3 (%0.0f m^3/yr)" % (s_m3, s_m3 / dt_yr))
        print("%0.3f km^3 (%0.3f km^3/yr)" % (s_km3, s_km3 / dt_yr))
        print("Density: %0.3f g/cc" % rho)
        print("%0.3f GT (%0.3f GT/yr)" % (s_gt, s_gt / dt_yr))
        print("%0.6f mm SLR (%0.6f mm/yr)" % (s_mm, s_mm / dt_yr))
        print("%0.3f m.w.e. (%0.3f m.w.e./yr)" % (s_mwe, s_mwe / dt_yr))
    else:
        print("Area: %0.2f km2" % (area / 1E6))
        print("%0.0f m^3" % s_m3)
        print("%0.3f km^3" % s_km3)
        print("Density: %0.3f g/cc" % rho)
        print("%0.3f GT" % s_gt)
        print("%0.6f mm SLR" % s_mm)
        print("%0.3f m.w.e." % s_mwe)
Example #5
#! /usr/bin/env python
"""
Create seasonal velocity composites for specified input relative date ranges
"""

import sys
import subprocess
from pygeotools.lib import timelib
from datetime import datetime, timedelta

fn_list = sys.argv[1:]
dt_list = [timelib.fn_getdatetime_list(fn) for fn in fn_list]

season = 'winter'
#season='summer'

#Define relative start/end dates for each season
if season == 'winter':
    dt_min = (10, 1)
    dt_max = (6, 30)
else:
    dt_min = (6, 1)
    dt_max = (10, 31)

#Dummy variable for year to compute day of year
yr = 2016
dt_min_doy = timelib.dt2doy(datetime(yr, *dt_min))
dt_max_doy = timelib.dt2doy(datetime(yr, *dt_max))
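#If the season wraps the calendar year (e.g. winter: 1 Oct through 30 Jun),
#the end date falls in the following year, so the span must cross the year boundary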
if dt_min_doy > dt_max_doy:
    max_delta = datetime(yr + 1, *dt_max) - datetime(yr, *dt_min)
else:
Example #6
def main():
    parser = getparser()
    #Create dictionary of arguments
    args = vars(parser.parse_args())

    #Want to enable -full when -of is specified; there is probably a cleaner way to do this with argparse
    if args['of']:
        args['full'] = True

    args['imshow_kwargs'] = pltlib.imshow_kwargs

    #Need to implement better extent handling for link and overlay
    #Can use warplib extent parsing
    extent = 'first'
    #extent = 'union'

    #Should accept 'ts' or 'fn' or string here, default is 'ts'
    #Can also accept list for subplots
    title = args['title']

    if args['link']:
        fig = plt.figure(0)
        n_ax = len(args['filelist'])
        src_ds_list = [gdal.Open(fn) for fn in args['filelist']]
        t_srs = geolib.get_ds_srs(src_ds_list[0])
        res_stats = geolib.get_res_stats(src_ds_list, t_srs=t_srs)
        #Use min res
        res = res_stats[0]
        #extent = 'intersection'
        extent = geolib.ds_geom_union_extent(src_ds_list, t_srs=t_srs)
        #extent = geolib.ds_geom_intersection_extent(src_ds_list, t_srs=t_srs)
        #print(res, extent)

    for n, fn in enumerate(args['filelist']):
        if not iolib.fn_check(fn):
            print('Unable to open input file: %s' % fn)
            continue

        if title == 'ts':
            ts = timelib.fn_getdatetime_list(fn)

            if ts:
                print("Timestamp list: ", ts)
                if len(ts) == 1:
                    args['title'] = ts[0].date()
                elif len(ts) > 1:
                    args['title'] = "%s to %s" % (ts[0].date(), ts[1].date())
            else:
                print("Unable to extract timestamp")
                args['title'] = None
        elif title == 'fn':
            args['title'] = fn

        #if title is not None:
        #    plt.title(title, fontdict={'fontsize':12})

        #Note: this won't work if img1 has 1 band and img2 has 3 bands
        #Hack for now
        if not args['link']:
            fig = plt.figure(n)
            n_ax = 1

        #fig.set_facecolor('black')
        fig.set_facecolor('white')
        fig.canvas.set_window_title(os.path.split(fn)[1])
        #fig.suptitle(os.path.split(fn)[1], fontsize=10)

        if args['overlay']:
            #Should automatically search for shaded relief with same base fn
            #bg_fn = os.path.splitext(fn)[0]+'_hs_az315.tif'
            #Clip/warp background dataset to match overlay dataset
            src_ds, bg_ds = warplib.memwarp_multi_fn([fn, args['overlay']],
                                                     extent=extent,
                                                     res='max')
            #Want to load up the unique bg array for each input
            args['bg'] = get_bma(bg_ds, 1, args['full'])
        else:
            src_ds = gdal.Open(fn, gdal.GA_ReadOnly)
            if args['link']:
                src_ds = warplib.memwarp_multi([src_ds,], res=res, extent=extent, t_srs=t_srs)[0]

        args['cbar_kwargs'] = pltlib.cbar_kwargs
        if args['no_cbar']:
            args['cbar_kwargs'] = None

        nbands = src_ds.RasterCount
        b = src_ds.GetRasterBand(1)
        dt = gdal.GetDataTypeName(b.DataType)
        #Eventually, check dt of each band
        print("%s (%i bands)" % (fn, nbands))
        #Singleband raster
        if (nbands == 1):
            if args['cmap'] is None:
                #Special case to handle ASP float32 grayscale data
                if '-L_sub' in fn or '-R_sub' in fn:
                    args['cmap'] = 'gray'
                else:
                    if (dt == 'Float64') or (dt == 'Float32') or (dt == 'Int32'):
                        args['cmap'] = 'cpt_rainbow'
                    #This is for WV images
                    elif (dt == 'UInt16'):
                        args['cmap'] = 'gray'
                    elif (dt == 'Byte'):
                        args['cmap'] = 'gray'
                    else:
                        args['cmap'] = 'cpt_rainbow'
                """
                if 'count' in fn:
                    args['clim_perc'] = (0,100)
                    cbar_kwargs['extend'] = 'neither'
                    args['cmap'] = 'cpt_rainbow'
                if 'mask' in fn:
                    args['clim'] = (0, 1)
                    #Could be (0, 255)
                    #args['clim_perc'] = (0,100)
                    #Want absolute clim of 0, then perc of 100
                    cbar_kwargs['extend'] = 'neither'
                    args['cmap'] = 'gray'
                """
            bma = get_bma(src_ds, 1, args['full'])
            if args['invert']:
                bma *= -1
            #Note: n+1 ensures the subplot index is assigned correctly (n is 0-indexed, subplots are 1-indexed)
            bma_fig(fig, bma, n_subplt=n_ax, subplt=n + 1, ds=src_ds, **args)
        #3-band raster, likely disparity map
        #This doesn't work when alpha band is present
        elif (nbands == 3) and (dt == 'Byte'):
            #For some reason, tifs are vertically flipped
            if (os.path.splitext(fn)[1] == '.tif'):
                args['imshow_kwargs']['origin'] = 'lower'
            #Use gdal dataset here instead of imread(fn)?
            imgplot = plt.imshow(plt.imread(fn), **args['imshow_kwargs'])
            pltlib.hide_ticks(imgplot.axes)
        #Handle the 3-band disparity map case here
        #elif ((dt == 'Float32') or (dt == 'Int32')):
        else:
            if args['cmap'] is None:
                args['cmap'] = 'cpt_rainbow'
            bn = 1
            while bn <= nbands:
                bma = get_bma(src_ds, bn, args['full'])
                bma_fig(fig, bma, n_subplt=nbands, subplt=bn, ds=src_ds, **args)
                bn += 1
        #Want to be better about this else case - lazy for now
        #else:
        #    bma = get_bma(src_ds, 1, args['full'])
        #    bma_fig(fig, bma, **args)

        plt.tight_layout()

        #Write out the file
        #Note: make sure display is local for savefig
        if args['of']:
            outf = str(os.path.splitext(fn)[0]) + '_fig.' + args['of']
            #outf = str(os.path.splitext(fn)[0])+'_'+str(os.path.splitext(args['overlay'])[0])+'_fig.'+args['of']

            #Note: need to account for colorbar (12%) and title - some percentage of axes beyond bma dimensions
            #Should specify minimum text size for output

            max_size = np.array((10.0, 10.0))
            max_dpi = 300.0
            #If both outsize and dpi are specified, don't try to change, just make the figure
            if (args['outsize'] is None) and (args['dpi'] is None):
                args['dpi'] = 150.0

            #Unspecified out figure size for a given dpi
            if (args['outsize'] is None) and (args['dpi'] is not None):
                args['outsize'] = np.array(bma.shape[::-1]) / args['dpi']
                if np.any(np.array(args['outsize']) > max_size):
                    args['outsize'] = max_size
            #Specified output figure size, no specified dpi
            elif (args['outsize'] is not None) and (args['dpi'] is None):
                args['dpi'] = np.min([np.max(np.array(bma.shape[::-1])/np.array(args['outsize'])), max_dpi])

            print()
            print("Saving output figure:")
            print("Filename: ", outf)
            print("Size (in): ", args['outsize'])
            print("DPI (px/in): ", args['dpi'])
            print("Input dimensions (px): ", bma.shape[::-1])
            print("Output dimensions (px): ",
                  tuple(np.array(args['outsize']) * args['dpi']))
            print()

            fig.set_size_inches(args['outsize'])
            #fig.set_size_inches(54.427, 71.87)
            #fig.set_size_inches(40, 87)
            fig.savefig(outf,
                        dpi=args['dpi'],
                        bbox_inches='tight',
                        pad_inches=0,
                        facecolor=fig.get_facecolor(),
                        edgecolor='none')
            #fig.savefig(outf, dpi=args['dpi'], facecolor=fig.get_facecolor(), edgecolor='none')
    #Show the plot - want to show all at once
    if not args['of']:
        plt.show()
Example #7
def main():
    parser = getparser()
    args = parser.parse_args()

    fn = args.fn

    #This is mean density for N Cascades snow
    #rho = 0.5
    #Density of pure ice
    rho = args.rho
    #If number is in kg/m^3 rather than g/cc
    if rho > 10.:
        rho /= 1000.

    #Clip negative values to 0
    filt = False

    src_ds = iolib.fn_getds(fn)
    res = geolib.get_res(src_ds, square=True)[0]
    bma = iolib.ds_getma(src_ds)

    #Attempt to extract t1 and t2 from input filename
    ts = timelib.fn_getdatetime_list(fn)
    #Hardcode timestamps
    #ts = [datetime.datetime(2013,9,10), datetime.datetime(2014,5,14)]

    dt_yr = None
    if len(ts) == 2:
        dt = ts[1] - ts[0]
        year = datetime.timedelta(days=365.25)
        dt_yr = dt.total_seconds() / year.total_seconds()

    #Can add filter here to remove outliers, perc_fltr(0.01, 99.9)
    if filt:
        mask = np.ma.getmaskarray(bma)
        bma[bma < 0] = 0
        bma = np.ma.array(bma, mask=mask)

    #Print out stats
    print('\n')
    stats = malib.print_stats(bma)
    print('\n')

    count = stats[0]
    area = res**2 * count
    mean = stats[3]
    med = stats[5]

    s_m3 = np.ma.sum(bma) * res**2
    s_km3 = s_m3 / 1E9
    s_mwe = mean * rho
    s_gt = s_km3 * rho
    #s_mm = s_gt/374
    #https://climatesanity.wordpress.com/conversion-factors-for-ice-and-water-mass-and-volume/
    s_mm = s_gt / 360

    if dt_yr is not None:
        print("%s to %s: %0.2f yr" % (ts[0], ts[1], dt_yr))
        print("%0.0f m^3 (%0.0f m^3/yr)" % (s_m3, s_m3 / dt_yr))
        print("%0.3f km^3 (%0.3f km^3/yr)" % (s_km3, s_km3 / dt_yr))
        print("Density: %0.3f g/cc" % rho)
        print("%0.3f GT (%0.3f GT/yr)" % (s_gt, s_gt / dt_yr))
        print("%0.6f mm SLR (%0.6f mm/yr)" % (s_mm, s_mm / dt_yr))
        print("%0.3f m.w.e. (%0.3f m.w.e./yr)" % (s_mwe, s_mwe / dt_yr))
    else:
        print("Area: %0.2f km2" % (area / 1E6))
        print("%0.0f m^3" % s_m3)
        print("%0.3f km^3" % s_km3)
        print("Density: %0.3f g/cc" % rho)
        print("%0.3f GT" % s_gt)
        print("%0.6f mm SLR" % s_mm)
        print("%0.3f m.w.e." % s_mwe)
    print('\n')
Example #8
    #This will check input param for validity, could do beforehand
    dem1_ds, dem2_ds = warplib.memwarp_multi_fn([dem1_fn, dem2_fn], extent='intersection', res=res, t_srs='first')
    print("Loading input DEMs into masked arrays")
    dem1 = iolib.ds_getma(dem1_ds, 1)
    dem2 = iolib.ds_getma(dem2_ds, 1)
    dem2_ts = timelib.fn_getdatetime(dem2_fn)
    dz = dem2 - dem1
    outprefix = os.path.splitext(os.path.split(dem1_fn)[1])[0]+'_'+os.path.splitext(os.path.split(dem2_fn)[1])[0]
elif args.dz_fn is not None:
    dz_fn = args.dz_fn
    dem1_ds, dz_ds = warplib.memwarp_multi_fn([dem1_fn, dz_fn], extent='intersection', res=res, t_srs='first')
    print("Loading input DEM and Snow depth into masked arrays")
    dem1 = iolib.ds_getma(dem1_ds, 1)
    dz = iolib.ds_getma(dz_ds, 1)
    #Try to pull out second timestamp from dz_fn
    dem2_ts = timelib.fn_getdatetime_list(dz_fn)[-1]
    outprefix = os.path.splitext(os.path.split(dz_fn)[1])[0]

outprefix = os.path.join(args.outdir, outprefix)

#Calculate water year
wy = dem1_ts.year + 1
if dem1_ts.month >= 10:
    wy = dem1_ts.year
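#Convention note: a USGS water year N spans 1 Oct of calendar year N-1 through 30 Sep of year N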

#These need to be updated in geolib to use gdaldem API
hs = geolib.gdaldem_mem_ds(dem1_ds, processing='hillshade', returnma=True)
hs_clim = (1,255)

dem_clim = malib.calcperc(dem1, (1,99))
res = geolib.get_res(dem1_ds)[0]
Example #9
def main():
    parser = getparser()
    args = parser.parse_args()

    t_unit = args.dt
    plot = args.plot
    remove_offsets = args.remove_offsets
    mask_fn = args.mask_fn
    if mask_fn is not None:
        remove_offsets = True

    #Input is 3-band disparity map, extract bands directly
    src_fn = args.disp_fn
    if not iolib.fn_check(src_fn):
        sys.exit("Unable to locate input file: %s" % src_fn)

    src_ds = iolib.fn_getds(src_fn)
    if src_ds.RasterCount != 3:
        sys.exit("Input file must be ASP disparity map (3 bands: x, y, mask)")
    #Extract pixel resolution
    h_res, v_res = geolib.get_res(src_ds)

    #Horizontal scale factor
    #If running on disparity_view output (gdal_translate -outsize 5% 5% F.tif F_5.tif)
    #h_res /= 20
    #v_res /= 20

    #Load horizontal and vertical disparities
    h = iolib.ds_getma(src_ds, bnum=1)
    v = iolib.ds_getma(src_ds, bnum=2)

    #ASP output has northward motion as negative values in band 2
    v *= -1

    t1, t2 = timelib.fn_getdatetime_list(src_fn)
    dt = t2 - t1
    #Default t_factor is in 1/years
    t_factor = timelib.get_t_factor(t1, t2)

    #Input timestamp arrays if inputs are mosaics
    if False:
        t1_fn = ''
        t2_fn = ''
        if os.path.exists(t1_fn) and os.path.exists(t2_fn):
            t_factor = timelib.get_t_factor_fn(t1_fn, t2_fn)
        if t_factor is None:
            sys.exit("Unable to determine input timestamps")

    if t_unit == 'day':
        t_factor *= 365.25
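        #With t_factor expressed in days rather than years, the velocities computed below come out in m/day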

    print("Input dates:")
    print(t1)
    print(t2)
    print(dt)
    print(t_factor, t_unit)

    #Scale values for polar stereographic distortion
    srs = geolib.get_ds_srs(src_ds)
    proj_scale_factor = 1.0
    #Want to scale to get correct distances for polar stereographic
    if srs.IsSame(geolib.nps_srs) or srs.IsSame(geolib.sps_srs):
        proj_scale_factor = geolib.scale_ps_ds(src_ds)
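        #Stereographic projections are only true to scale at the standard parallel,
        #so ground distances elsewhere on the grid need this correction factor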

    #Convert disparity values in pixels to m/t_unit
    h_myr = h * h_res * proj_scale_factor / t_factor
    h = None
    v_myr = v * v_res * proj_scale_factor / t_factor
    v = None

    #Velocity Magnitude
    m = np.ma.sqrt(h_myr**2 + v_myr**2)
    print("Velocity Magnitude stats")
    malib.print_stats(m)

    #Remove x and y offsets over control surfaces
    offset_str = ''
    if remove_offsets:
        if mask_fn is None:
            from demcoreg.dem_mask import get_mask
            print("\nUsing demcoreg to prepare mask of stable control surfaces\n")
            #TODO: Accept mask_list as in demcoreg
            #mask_list = args.mask_list
            # for now keep it simple, limit to non-glacier surfaces
            mask_list = ['glaciers']
            mask = get_mask(src_ds, mask_list=mask_list, dem_fn=src_fn)
        else:
            print("\nWarping input raster mask")
            #This can be from previous dem_mask.py run (e.g. *rockmask.tif)
            mask_ds = warplib.memwarp_multi_fn([mask_fn,], res=src_ds, extent=src_ds, t_srs=src_ds)[0]
            mask = iolib.ds_getma(mask_ds)
            #The default from ds_getma is a masked array, so need to isolate boolean mask
            #Assume input is 0 for masked, 1 for unmasked (valid control surface)
            mask = mask.filled().astype('bool')
            #This should work, as the *rockmask.tif is 1 for unmasked, 0 for masked, with ndv=0
            #mask = np.ma.getmaskarray(mask)
            #Vector mask - untested
            if os.path.splitext(mask_fn)[1] == '.shp':
                mask = geolib.shp2array(mask_fn, src_ds)

        print("\nRemoving median x and y offset over static control surfaces")
        h_myr_count = h_myr.count()
        h_myr_static_count = np.ma.array(h_myr, mask=mask).count()
        h_myr_mad, h_myr_med = malib.mad(np.ma.array(h_myr, mask=mask),
                                         return_med=True)
        v_myr_mad, v_myr_med = malib.mad(np.ma.array(v_myr, mask=mask),
                                         return_med=True)

        print("Static pixel count: %i (%0.1f%%)" %
              (h_myr_static_count,
               100 * float(h_myr_static_count) / h_myr_count))
        print("median (+/-NMAD)")
        print("x velocity offset: %0.2f (+/-%0.2f) m/%s" %
              (h_myr_med, h_myr_mad, t_unit))
        print("y velocity offset: %0.2f (+/-%0.2f) m/%s" %
              (v_myr_med, v_myr_mad, t_unit))
        h_myr -= h_myr_med
        v_myr -= v_myr_med
        offset_str = '_offsetcorr_h%0.2f_v%0.2f' % (h_myr_med, v_myr_med)
        #Velocity Magnitude
        m = np.ma.sqrt(h_myr**2 + v_myr**2)
        print("Velocity Magnitude stats after correction")
        malib.print_stats(m)

    if plot:
        fig_fn = os.path.splitext(src_fn)[0] + '.png'
        label = 'Velocity (m/%s)' % t_unit
        f, ax = make_plot(m, fig_fn, label)
        plotvec(h_myr, v_myr)
        plt.tight_layout()
        plt.savefig(fig_fn,
                    dpi=300,
                    bbox_inches='tight',
                    pad_inches=0,
                    edgecolor='none')

    print("Writing out files")
    gt = src_ds.GetGeoTransform()
    proj = src_ds.GetProjection()
    dst_fn = os.path.splitext(src_fn)[0] + '_vm%s.tif' % offset_str
    iolib.writeGTiff(m, dst_fn, create=True, gt=gt, proj=proj)
    dst_fn = os.path.splitext(src_fn)[0] + '_vx%s.tif' % offset_str
    iolib.writeGTiff(h_myr, dst_fn, create=True, gt=gt, proj=proj)
    dst_fn = os.path.splitext(src_fn)[0] + '_vy%s.tif' % offset_str
    iolib.writeGTiff(v_myr, dst_fn, create=True, gt=gt, proj=proj)
    src_ds = None
Example #10
#Ellipsoidal heights at sea level should be above -100 m
zlim = (-200,8388)
zlim_pad = 150

#Window size for median and gauss filters (px)
size = 3

#Input stack npz
stack_fn = sys.argv[1]

#User-provided timestamps with format YYYYMMDD
if len(sys.argv) > 2:
    dt_list_str = sys.argv[2:]
    if len(dt_list_str) == 1:
        dt_list_str = dt_list_str[0].split(' ')
    dt_list = [timelib.fn_getdatetime_list(dt_str)[0] for dt_str in dt_list_str]
else:
    #SRTM, then systematic ASTER timestamps
    dt_list = [datetime(2000,2,11), datetime(2000,5,31), datetime(2009,5,31), datetime(2018,5,31)]

#Use tif on disk if available
out_fn = os.path.splitext(stack_fn)[0]
#Otherwise load stack and compute trend/intercept if necessary

trend_fn = out_fn + '_trend.tif'
trend_ds = iolib.fn_getds(trend_fn)
trend = iolib.ds_getma(trend_ds)/365.25
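#Assumption: the stack trend raster is stored in m/day, so dividing by 365.25 yields m/yr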

intercept_fn = out_fn + '_intercept.tif'
#Hmmm, no 365.25 factor here.  Clean up here and in stack generation
intercept = iolib.fn_getma(intercept_fn)