Example 1
def get_raster_idx(x_vect, y_vect, pt_srs, ras_ds, max_slope=20, pad=0):
    """Get raster index corresponding to the set of X,Y locations
    """
    #Convert input xy coordinates to raster coordinates
    mX_fltr, mY_fltr, mZ = geolib.cT_helper(x_vect, y_vect, 0, pt_srs, geolib.get_ds_srs(ras_ds))
    pX_fltr, pY_fltr = geolib.mapToPixel(mX_fltr, mY_fltr, ras_ds.GetGeoTransform())
    pX_fltr = np.atleast_1d(pX_fltr)
    pY_fltr = np.atleast_1d(pY_fltr)

    #Sample raster
    #This returns median and mad for ICESat footprint
    samp = geolib.sample(ras_ds, mX_fltr, mY_fltr, pad=pad)
    samp_idx = ~(np.ma.getmaskarray(samp[:,0]))
    npts = samp_idx.nonzero()[0].size

    if False:
        print("Applying slope filter, masking points with slope > %0.1f" % max_slope)
        slope_ds = geolib.gdaldem_mem_ds(ras_ds, processing='slope', returnma=False)
        slope_samp = geolib.sample(slope_ds, mX_fltr, mY_fltr, pad=pad)
        slope_samp_idx = (slope_samp[:,0] <= max_slope).data
        samp_idx = np.logical_and(slope_samp_idx, samp_idx)

    return samp, samp_idx, npts, pX_fltr, pY_fltr
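
#Illustrative usage sketch (assumed, not part of the original example): expects
#a local 'dem.tif' and lon/lat points in WGS84; pad is the geolib.sample()
#window size in pixels
import numpy as np
from osgeo import gdal
from pygeotools.lib import geolib

ras_ds = gdal.Open('dem.tif')
x = np.array([-108.20, -108.25])
y = np.array([39.10, 39.12])
samp, samp_idx, npts, pX, pY = get_raster_idx(x, y, geolib.wgs_srs, ras_ds, pad=1)
print("%i of %i points fall on valid pixels" % (npts, x.size))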
Example 2
def map_plot(site_list, ds):
    a = iolib.ds_getma(ds)
    clim = malib.calcperc(a, (2, 98))
    mX = site_list[:, 1]
    mY = site_list[:, 2]
    pX, pY = geolib.mapToPixel(mX, mY, ds.GetGeoTransform())
    #f, ax = plt.subplots(1, figsize=(6,6), subplot_kw={'aspect':'equal', 'adjustable':'box-forced'})
    f, ax = plt.subplots(1, figsize=(6, 6), subplot_kw={'aspect': 'equal'})
    im = ax.imshow(a, vmin=clim[0], vmax=clim[1], cmap='inferno')
    ax.set_facecolor('0.5')
    from imview.lib import pltlib
    pltlib.add_scalebar(ax, geolib.get_res(ds)[0])
    ax.scatter(pX, pY, s=16, facecolors='w', edgecolors='k')
    for i, lbl in enumerate(site_list[:, 0]):
        bbox = dict(boxstyle='round,pad=0.1', fc='k', alpha=0.7)
        ax.annotate(str(int(lbl)),
                    xy=(pX[i], pY[i]),
                    xytext=(0, 4),
                    textcoords='offset points',
                    fontsize=8,
                    color='w',
                    bbox=bbox)
    return f
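
#Illustrative usage sketch (assumed, not part of the original example): expects
#'sites.csv' with columns id,x,y in the same projection as 'dem.tif'
import numpy as np
from osgeo import gdal

site_list = np.loadtxt('sites.csv', delimiter=',')
ds = gdal.Open('dem.tif')
f = map_plot(site_list, ds)
f.savefig('site_map.png', dpi=300, bbox_inches='tight')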
Example 3
def shp_overlay(ax, ds, shp_fn, gt=None, color='darkgreen'):
    from osgeo import ogr
    from pygeotools.lib import geolib
    #ogr2ogr -f "ESRI Shapefile" output.shp input.shp -clipsrc xmin ymin xmax ymax
    shp_ds = ogr.Open(shp_fn)
    lyr = shp_ds.GetLayer()
    lyr_srs = lyr.GetSpatialRef()
    lyr.ResetReading()
    nfeat = lyr.GetFeatureCount()
    #Note: this is inefficient for large numbers of features
    #Should produce collections of points or lines, then have single plot call
    for n, feat in enumerate(lyr):
        geom = feat.GetGeometryRef()
        geom_type = geom.GetGeometryType()
        #Points
        if geom_type == 1:
            mX, mY, z = geom.GetPoint()
            attr = {'marker':'o', 'markersize':5, 'linestyle':'None'}
        #Line
        elif geom_type == 2:
            l, mX, mY = geolib.line2pts(geom)
            z = 0
            #attr = {'marker':None, 'linestyle':'-', 'linewidth':0.5, 'alpha':0.8}
            attr = {'marker':None, 'linestyle':'-', 'linewidth':1.0, 'alpha':0.8}
            #attr = {'marker':'.', 'markersize':0.5, 'linestyle':'None'}
        #Polygon, placeholder
        #Note: this should be done with the matplotlib patch functionality
        #http://matplotlib.org/users/path_tutorial.html
        elif geom_type == 3:
            print("Polygon support not yet implemented")
            #ogr2ogr -nlt LINESTRING out.shp in.shp
            l, mX, mY = geolib.line2pts(geom)
            z = 0
            attr = {'marker':None, 'linestyle':'-', 'facecolor':'w'}

        ds_srs = geolib.get_ds_srs(ds) 
        if gt is None:
            gt = ds.GetGeoTransform()
        if not lyr_srs.IsSame(ds_srs):
            mX, mY, z = geolib.cT_helper(mX, mY, z, lyr_srs, ds_srs)

        #ds_extent = geolib.ds_extent(ds)
        ds_extent = geolib.ds_geom_extent(ds)
      
        mX = np.ma.array(mX)
        mY = np.ma.array(mY)

        mX[mX < ds_extent[0]] = np.ma.masked
        mX[mX > ds_extent[2]] = np.ma.masked
        mY[mY < ds_extent[1]] = np.ma.masked
        mY[mY > ds_extent[3]] = np.ma.masked

        mask = np.ma.getmaskarray(mY) | np.ma.getmaskarray(mX)
        mX = mX[~mask]
        mY = mY[~mask]

        if mX.count() > 0:
            ax.set_autoscale_on(False)
            if geom_type == 1: 
                pX, pY = geolib.mapToPixel(np.array(mX), np.array(mY), gt)
                ax.plot(pX, pY, color=color, **attr)
            else:
                l = np.ma.array(l)
                l = l[~mask]

                lmed = np.ma.median(np.diff(l))
                lbreaks = (np.diff(l) > lmed*2).nonzero()[0]
                if lbreaks.size: 
                    a = 0
                    lbreaks = list(lbreaks)
                    lbreaks.append(l.size)
                    for b in lbreaks:
                        mmX = mX[a:b+1]
                        mmY = mY[a:b+1]
                        a = b+1
                        #import ipdb; ipdb.set_trace()
                        #pX, pY = geolib.mapToPixel(np.array(mX), np.array(mY), gt)
                        pX, pY = geolib.mapToPixel(mmX, mmY, gt)
                        print(n, np.diff(pX).max(), np.diff(pY).max())
                        #ax.plot(pX, pY, color='LimeGreen', **attr)
                        #ax.plot(pX, pY, color='LimeGreen', alpha=0.5, **attr)
                        #ax.plot(pX, pY, color='w', alpha=0.5, **attr)
                        ax.plot(pX, pY, color=color, **attr)
                else:
                    pX, pY = geolib.mapToPixel(np.array(mX), np.array(mY), gt)
                    ax.plot(pX, pY, color=color, **attr)
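
#Illustrative usage sketch (assumed, not part of the original example): plot a
#raster in pixel coordinates, then overlay features from 'outlines.shp'
import matplotlib.pyplot as plt
from osgeo import gdal
from pygeotools.lib import iolib, malib

ds = gdal.Open('dem.tif')
a = iolib.ds_getma(ds)
clim = malib.calcperc(a, (2, 98))
f, ax = plt.subplots(figsize=(6, 6))
ax.imshow(a, vmin=clim[0], vmax=clim[1], cmap='gray')
shp_overlay(ax, ds, 'outlines.shp', color='red')
f.savefig('overlay.png', dpi=300)
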
def main():
    parser = argparse.ArgumentParser(
        description='Utility to extract raster values')
    #Add x, y, z column definitions as optional arguments (defaults 0, 1, 2)
    parser.add_argument('feat_fn', help='input feature filename "points.csv"')
    parser.add_argument('filelist',
                        nargs='+',
                        help='input filenames "img1.tif img2.tif..."')
    args = parser.parse_args()

    #Assumptions
    #Input csv has x,y,z as first three columns
    #cat jako_icesat_clip.csv | awk -F',' 'BEGIN {OFS=","} { print $8, $9, $1, $2}'
    #Input xyz coordinates have identical projection to input rasters

    #For input csv containing x,y,z coordinates
    #Load into np array
    ndv = np.nan
    #Expects column names in first line header
    #a = np.genfromtxt(args.feat_fn, delimiter=',', names=True)
    #a_ma = np.ma.masked_equal(a.view((np.float, len(a.dtype.names))), ndv)
    #header = list(a.dtype.names)
    a = np.genfromtxt(args.feat_fn, delimiter=',', dtype=None)
    a_ma = np.ma.masked_invalid(a)
    header = ['x', 'y', 'z']

    #Grab header as separate list
    #with open(args.feat_fn, 'r') as f:
    #    reader = csv.reader(f)
    #    header = reader.next()
    #Now loads directly into 2d np array
    #a = np.loadtxt(args.feat_fn, delimiter=',', skiprows=1)

    #check number of records
    if a_ma.shape[0] == 0:
        sys.exit('Input csv contains no records')

    #Try to find columns with lon or [Xx]
    #Check to see if we have a z column, or multiple z columns

    #If x,y,z columns are specified, use those indices
    #xidx = -2
    xidx = 0
    #yidx = -1
    yidx = 1
    #zidx = None
    zidx = 2

    #Use this with array imported with genfromtxt
    #xcol = a_ma[a_ma.dtype.names[0]]
    #ycol = a_ma[a_ma.dtype.names[1]]
    #zcol = a_ma[a_ma.dtype.names[2]]

    #Keep everything in the output array
    out_ma = a_ma
    out_ndv = np.nan
    #out_ndv = -32768

    #x, y, z
    fmt = ['%0.6f', '%0.6f']

    for fn in args.filelist:
        #Want to reload these, as they are overwritten if coordTransform
        #Use this with array imported with loadtxt
        xcol = a_ma[:, xidx]
        ycol = a_ma[:, yidx]
        if zidx is not None:
            zcol = a_ma[:, zidx]
            fmt.append('%0.3f')

        #These short names are useless
        #DEM mosaics, use date
        #create dictionary, output key with full filenames

        #Apply smoothing filter to input datasets
        #This can be done on the fly, but some ds are very large, so write out product
        smooth = False
        if smooth:
            b_smooth_fn = os.path.splitext(fn)[0] + '_smooth.tif'
            if os.path.exists(b_smooth_fn):
                fn = b_smooth_fn
            else:
                """
                print('Smoothing %s' % fn)
                from lib import filtlib 
                size = 9
                ds = gdal.Open(fn, gdal.GA_ReadOnly)
                #This should be done for each band number bn
                #For now, just assume singleband
                bn = 1
                b = malib.fn_getma(fn)
                b_smooth = filtlib.gauss_fltr_astropy(b, size=size)
                malib.writeGTiff(b_smooth, b_smooth_fn, ds, bnum=bn) 
                #b = b_smooth
                fn = b_smooth_fn
                """

        shortName = fn[:8]
        ds = gdal.Open(fn, gdal.GA_ReadOnly)
        gt = ds.GetGeoTransform()
        res = (gt[1] - gt[5]) / 2
        rasterCRS = osr.SpatialReference()
        rasterCRS.ImportFromWkt(ds.GetProjectionRef())

        #Assume that input csv has same projection
        layerCRS = rasterCRS
        #layerCRS = geolib.wgs_srs

        coordTransform = None
        if not layerCRS.IsSame(rasterCRS):
            #Faster to use pyproj here?
            coordTransform = osr.CoordinateTransformation(layerCRS, rasterCRS)
            mapcoord = np.array([
                coordTransform.TransformPoint(x, y, 0)
                for x, y in zip(xcol, ycol)
            ])
            #xcol, ycol, zcol = zip(*mapcoord)
            xcol, ycol, zcol = np.hsplit(mapcoord, 3)

        for bn in range(1, ds.RasterCount + 1):
            print(fn, bn)
            b = malib.ds_getma(ds, bn)
            #Note, ndv should be universal, so output values are consistent
            ndv = b.fill_value

            rzcol = np.empty(xcol.shape)
            rzcol[:] = ndv

            #This uses np index arrays to do the extraction - very fast
            if True:
                #This may be off by one
                #Getting some errors IndexError: index (14187) out of range (0<=index<14186) in dimension 1
                pX, pY = geolib.mapToPixel(xcol, ycol, gt)
                #Need some kind of check here for pX and pY
                #if (pX.min() < 0 or pX.max() >= ds.RasterXSize) or (pY.min() < 0 or pY.max() >= ds.RasterYSize):
                #But how to preserve record with ndv using index arrays?
                #Could use masked arrays with fill values of 0,0 - assume that 0,0 will be nodata in input raster?
                #DONT use ma here
                pX = np.clip(pX, 0, ds.RasterXSize - 1)
                pY = np.clip(pY, 0, ds.RasterYSize - 1)
                #pX = np.ma.masked_outside(np.ma.around(pX), 0, ds.RasterXSize-1)
                #pY = np.ma.masked_outside(np.ma.around(pY), 0, ds.RasterYSize-1)
                rzcol = extractPoint(b, pX, pY)

            #This goes through record by record and extracts values
            #Safer for ndv handling and exceptions beyond b extent
            else:
                for i in range(xcol.size):
                    x = xcol[i]
                    y = ycol[i]

                    #Need to vectorize this to process entire array up front
                    if coordTransform is not None:
                        cT = coordTransform.TransformPoint(x, y, 0)
                        x = cT[0]
                        y = cT[1]

                    #Create index arrays
                    pX, pY = geolib.mapToPixel([x], [y], gt)

                    #Make sure pixel location is within map coordinates
                    if (pX < 0 or pX >= ds.RasterXSize) or (
                            pY < 0 or pY >= ds.RasterYSize):
                        rz = out_ndv
                    else:
                        #Extract Z value at a single pixel
                        rz = extractPoint(b, pX, pY)

                        #Pull out instrument ID and add logic for block size
                        #ICESat is ~70 m diameter for early shots, ~35 m diameter for later shots
                        #block_m = np.array([35, 35])
                        #ATM L2 is roughly 50 m along-track and 80 m cross-track
                        #Need to compute heading to properly sample these blocks, for now assume diameter 50 m
                        #block_m = np.array([25, 25])
                        #block_p = np.around(block_m/res)
                        #rz = extractBlock(b, pX, pY, block_p[0], block_p[1])
                    rzcol[i] = rz

            #rzcol = np.ma.fix_invalid(rzcol, copy=False)
            #This is necessary b/c np.nan is float, can't set fill for int
            rzcol = np.ma.fix_invalid(rzcol, copy=False).astype(float)
            rzcol.set_fill_value(out_ndv)

            #Use shortname for shp
            #colname = shortName + '_b' + str(bn)

            colname = os.path.splitext(
                os.path.split(fn)[1])[0] + '_b' + str(bn)
            header.append(colname)
            out_ma = np.ma.column_stack((out_ma, rzcol))
            fmt.append('%0.3f')

            #Compute difference values
            if zidx is not None:
                dzcol = zcol - rzcol
                dzcol.set_fill_value(out_ndv)
                stats = malib.print_stats(dzcol)
                genhist(dzcol)
                header.append(colname + '_dz')
                out_ma = np.ma.column_stack((out_ma, dzcol))
                fmt.append('%0.3f')
        b = None
    ds = None

    dst_fn = os.path.splitext(args.feat_fn)[0] + '_extractZ.csv'
    f = open(dst_fn, 'w')
    writer = csv.writer(f)
    writer.writerow(header)
    #np.savetxt(f, out_ma, delimiter=",", fmt=fmt)
    np.savetxt(f, out_ma, delimiter=",", fmt=('%0.6f'))
    f.close()
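
#Note: main() above (and Example 9 below) call an extractPoint() helper that is
#not included in this listing. A minimal sketch of what it presumably does
#(nearest-neighbor lookup with numpy index arrays; name and behavior assumed):
import numpy as np

def extractPoint(b, pX, pY):
    #Round fractional pixel coordinates and use them as index arrays
    pX_idx = np.around(np.atleast_1d(pX)).astype(int)
    pY_idx = np.around(np.atleast_1d(pY)).astype(int)
    return b[pY_idx, pX_idx]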
Example 5
    x_fltr = glas_pts_fltr[:,xcol]
    y_fltr = glas_pts_fltr[:,ycol]
    z_fltr = glas_pts_fltr[:,zcol]

    dem_mask_fn = os.path.splitext(dem_fn)[0]+'_ref.tif'
    if os.path.exists(dem_mask_fn):
        print("Loading Masked DEM: %s" % dem_mask_fn)
        dem_mask_ds = gdal.Open(dem_mask_fn) 
        dem_mask = iolib.ds_getma(dem_mask_ds) 
    else:
        dem_mask_ds = dem_ds
        dem_mask = dem_ma

    #Convert input xy coordinates to raster coordinates
    mX_fltr, mY_fltr, mZ = geolib.cT_helper(x_fltr, y_fltr, 0, pt_srs, geolib.get_ds_srs(dem_mask_ds))
    pX_fltr, pY_fltr = geolib.mapToPixel(mX_fltr, mY_fltr, dem_mask_ds.GetGeoTransform())
    pX_fltr = np.atleast_1d(pX_fltr)
    pY_fltr = np.atleast_1d(pY_fltr)

    #Sample raster
    #This returns median and mad for ICESat footprint
    samp = geolib.sample(dem_mask_ds, mX_fltr, mY_fltr, pad=pad)
    samp_idx = ~(np.ma.getmaskarray(samp[:,0]))
    npts = samp_idx.nonzero()[0].size
    if npts < min_pts:
        print("Not enough points after sampling valud pixels, post bareground mask (%i < %i)" % (npts, min_pts))
        continue
       
    if True:
        print("Applying slope filter, masking points with slope > %0.1f" % max_slope)
        slope_ds = geolib.gdaldem_mem_ds(dem_mask_ds, processing='slope', returnma=False)
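        #The listing is truncated here; a sketch of how the slope filter likely
        #continues, following the same pattern as the disabled block in Example 1:
        slope_samp = geolib.sample(slope_ds, mX_fltr, mY_fltr, pad=pad)
        slope_samp_idx = (slope_samp[:,0] <= max_slope).data
        samp_idx = np.logical_and(slope_samp_idx, samp_idx)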
Example 6
def plot_point_map(mx, my):
    ex, ey = geolib.mapToPixel(mx, my, gt)
    plot_point(ex, ey)
#Stereo2SWE preliminary products
dem_fn = '/Users/dshean/Documents/UW/SnowEx/preliminary_mos_20170504/gm_8m-tile-0.tif'
hs_fn = '/Users/dshean/Documents/UW/SnowEx/preliminary_mos_20170504/gm_8m-tile-0_hs_az315.tif'
#snowdepth_fn = '/Users/dshean/Documents/UW/SnowEx/preliminary_snowdepth_20170606/snowdepth_20170201-20170317_mos-tile-0.tif'
snowdepth_fn = '/Users/dshean/Documents/UW/SnowEx/preliminary_snowdepth_20170606/snowdepth_tif/snowdepth_20170201-20170317_mos-tile-0_filt5px.tif'

#Load and clip to common extent
dem_ds, hs_ds, snowdepth_ds = warplib.memwarp_multi_fn(
    [dem_fn, hs_fn, snowdepth_fn], extent='union')
dem = iolib.ds_getma(dem_ds)
hs = iolib.ds_getma(hs_ds)
snowdepth = iolib.ds_getma(snowdepth_ds)

#Pixel coordinates of sample sites
x, y = geolib.mapToPixel(b[:, 1], b[:, 2], dem_ds.GetGeoTransform())
depth = b[:, 3]
rho = b[:, 4]

#Sample DEM snow depth
samp = geolib.sample(snowdepth_ds, b[:, 1], b[:, 2], pad=5)

#Filter to throw out samples with significant roughness over sampled area
samp_perc_thresh = 0.3
samp_mask = (samp[:, 1] / samp[:, 0]) > samp_perc_thresh
depth_diff = depth - samp[:, 0]
depth_diff[samp_mask] = np.ma.masked
idx = np.ma.argsort(np.abs(depth_diff))[::-1]
x = x[idx]
y = y[idx]
depth = depth[idx]
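
#Illustrative continuation (assumed, not part of the original example): quick
#summary statistics for the probe depth vs. sampled raster snow depth
#difference computed above
from pygeotools.lib import malib
print("Valid comparisons: %i of %i" % (depth_diff.count(), depth_diff.size))
malib.print_stats(depth_diff)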
Example 8
    if os.path.exists(dem_mask_fn):
        print("Writing out %i ICESat-GLAS shots for co-registration" % glas_pts_fltr_coreg.shape[0])
        out_csv_fn_coreg = os.path.splitext(out_csv_fn)[0]+'_ref.csv'
        #lat,lon,elev_ground for pc_align
        out_csv_fn_coreg_asp = os.path.splitext(out_csv_fn)[0]+'_ref_asp.csv'
        #Could add DEM samp columns here
        np.savetxt(out_csv_fn_coreg, glas_pts_fltr_coreg, header=hdr_out, fmt=fmt_out, delimiter=',', comments='')
        np.savetxt(out_csv_fn_coreg_asp, glas_pts_fltr_coreg_asp, fmt='%0.6f, %0.6f, %0.2f', delimiter=',')

    # For plotting the qfilt ICESat-GLAS used for co-reg
    x_fltr_mask_coreg = glas_pts_fltr_coreg[:,xcol]
    y_fltr_mask_coreg = glas_pts_fltr_coreg[:,ycol]
    z_fltr_mask_coreg = glas_pts_fltr_coreg[:,zcol]
    mX_fltr_mask_coreg, mY_fltr_mask_coreg, mZ_coreg = geolib.cT_helper(x_fltr_mask_coreg, y_fltr_mask_coreg, 0, pt_srs, geolib.get_ds_srs(dem_mask_ds))
    pX_fltr_mask_coreg, pY_fltr_mask_coreg = geolib.mapToPixel(mX_fltr_mask_coreg, mY_fltr_mask_coreg, dem_mask_ds.GetGeoTransform())
    pX_fltr_mask_coreg = np.atleast_1d(pX_fltr_mask_coreg)
    pY_fltr_mask_coreg = np.atleast_1d(pY_fltr_mask_coreg)

    # For plotting the qfilt ICESat-GLAS used for examining all valid surfaces
    x_fltr_mask_valsurf = glas_pts_fltr_valsurf[:,xcol]
    y_fltr_mask_valsurf = glas_pts_fltr_valsurf[:,ycol]
    z_fltr_mask_valsurf = glas_pts_fltr_valsurf[:,zcol]
    mX_fltr_mask_valsurf, mY_fltr_mask_valsurf, mZ_valsurf = geolib.cT_helper(x_fltr_mask_valsurf, y_fltr_mask_valsurf, 0, pt_srs, geolib.get_ds_srs(dem_chmmask_ds))
    pX_fltr_mask_valsurf, pY_fltr_mask_valsurf = geolib.mapToPixel(mX_fltr_mask_valsurf, mY_fltr_mask_valsurf, dem_chmmask_ds.GetGeoTransform())
    pX_fltr_mask_valsurf = np.atleast_1d(pX_fltr_mask_valsurf)
    pY_fltr_mask_valsurf = np.atleast_1d(pY_fltr_mask_valsurf)

    # Get the elevation difference between GLAS and the DEM
    dz = z_fltr_mask_coreg - coreg_samp[coreg_samp_idx,0]
Example 9
out_ysize_m = ul_y - lr_y

#Output grid resolution
#Might want to take min of these two?  Or just use range res?
res = np.mean([az_pixel_spacing.mean(), range_pixel_spacing])

#Initialize output grid
out_nl = int(round(out_ysize_m/res))
out_ns = int(round(out_xsize_m/res))
out = np.zeros((out_nl, out_ns))

#Want to double check whether we're using center vs corner of UL pixel - add 0.5 px offsets
out_gt = [ul_x, res, 0.0, ul_y, 0.0, -res]

#Compute pixel coordinates for GPRI origin in output grid
ref_coord_px = geolib.mapToPixel(ref_coord_proj[0], ref_coord_proj[1], out_gt)

#Create arrays of x and y map coordinates for each output grid cell
out_y_px, out_x_px = np.indices(out.shape)
out_x_map, out_y_map = geolib.pixelToMap(out_x_px, out_y_px, out_gt)

#Extract DEM elevations for output map coordinates
dem_x_px, dem_y_px = geolib.mapToPixel(out_x_map, out_y_map, dem_gt)
z = extractPoint(dem, dem_x_px, dem_y_px) 

#Want to clean these up and define functions

#Compute range (meters) for each x,y,z in output grid
r = np.sqrt((out_x_map-ref_coord_proj[0])**2 + (out_y_map-ref_coord_proj[1])**2 + (z-ref_coord_proj[2])**2) 
#az_px = az_N + np.arctan2((out_x_map-ref_coord_proj[0]), (out_y_map-ref_coord_proj[1])) / az_angle_step
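
#For reference (a simplified sketch, not from this listing): with a north-up,
#non-rotated geotransform like out_gt above, mapToPixel/pixelToMap reduce to
#linear expressions in the GDAL geotransform terms. The actual geolib
#implementation may also apply a half-pixel shift for pixel-center vs corner
#registration (see the note above about the UL pixel).
def map_to_pixel_simple(mX, mY, gt):
    #gt = [ul_x, x_res, 0.0, ul_y, 0.0, -y_res]
    pX = (mX - gt[0]) / gt[1]
    pY = (mY - gt[3]) / gt[5]
    return pX, pY

def pixel_to_map_simple(pX, pY, gt):
    mX = gt[0] + pX * gt[1]
    mY = gt[3] + pY * gt[5]
    return mX, mY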