Example #1
def raster_shpclip(r_fn, shp_fn, extent='raster'):
    r_ds = iolib.fn_getds(r_fn)
    r_srs = geolib.get_ds_srs(r_ds)
    r_extent = geolib.ds_extent(r_ds)

    shp_ds = ogr.Open(shp_fn)
    lyr = shp_ds.GetLayer()
    shp_srs = lyr.GetSpatialRef()
    shp_extent = lyr.GetExtent()

    #Define the output srs - could be set to either the raster or shp srs
    #Could accept this as a command-line argument
    out_srs = r_srs

    if extent == 'raster':
        out_extent = r_extent 
    elif extent == 'shp':
        out_extent = shp_extent

    #r = iolib.ds_getma(r_ds)
    r_ds = warplib.memwarp(r_ds, extent=out_extent, t_srs=out_srs, r='cubic')
    r = iolib.ds_getma(r_ds)

    mask = geolib.shp2array(shp_fn, r_ds)

    r = np.ma.array(r, mask=mask)
    return r
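# Example usage (a sketch, not from the source: 'dem.tif' and 'glaciers.shp' are
# placeholder filenames, and the iolib/geolib/warplib/ogr/np imports used above
# are assumed to be in scope):
clipped = raster_shpclip('dem.tif', 'glaciers.shp', extent='raster')
print("%i valid pixels after clipping" % clipped.count())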
Example #2
def site_filter_extent_ds(ds, pad=None):
    """
    Filter available sites for a given dataset
    """
    snotel_srs = geolib.wgs_srs
    ds_srs = geolib.get_ds_srs(ds)
    extent = geolib.ds_extent(ds)
    #extent = geolib.ds_extent(ds, snotel_srs)
    #geom = geolib.get_outline(ds)
    return site_filter_extent(extent, ds_srs, pad)
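# Example usage (a sketch: 'dem.tif' is a placeholder filename, and site_filter_extent
# plus the iolib/geolib imports are assumed to be defined/imported elsewhere in this module):
ds = iolib.fn_getds('dem.tif')
sites = site_filter_extent_ds(ds, pad=None)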
Example #3
def get_cam2rpc_opts(t='pinhole', dem=None, gsd=None, num_samples=50):
    """
    Generates the argument list for ASP cam2rpc
    This generates RPC camera models from the optimized frame camera models
    See documentation here: https://stereopipeline.readthedocs.io/en/latest/tools/cam2rpc.html
    Parameters
    ----------
    t: str
        ASP session type (here, the type of input camera), default: pinhole
    dem: str
        path to DEM which will be used for calculating RPC polynomials
    gsd: float 
        Expected ground-sampling distance
    num_samples: int 
        Sampling for RPC approximation calculation (default=50)
    Returns
    ----------
    cam2rpc_opts: list
        A list of arguments for cam2rpc call.
    """

    cam2rpc_opts = []
    cam2rpc_opts.extend(['--dem-file', dem])
    dem_ds = iolib.fn_getds(dem)
    dem_proj = dem_ds.GetProjection()
    dem = iolib.ds_getma(dem_ds)
    min_height, max_height = np.percentile(dem.compressed(), (0.01, 0.99))
    tsrs = epsg2geolib(4326)
    xmin, ymin, xmax, ymax = geolib.ds_extent(dem_ds, tsrs)
    cam2rpc_opts.extend(['--height-range', str(min_height), str(max_height)])
    cam2rpc_opts.extend(
        ['--lon-lat-range',
         str(xmin),
         str(ymin),
         str(xmax),
         str(ymax)])
    if gsd:
        cam2rpc_opts.extend(['--gsd', str(gsd)])
    cam2rpc_opts.extend(['--session', t])
    cam2rpc_opts.extend(['--num-samples', str(num_samples)])
    return cam2rpc_opts
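# Example usage (a sketch: filenames are placeholders, and the positional arguments
# shown for the cam2rpc call are an assumption; see the ASP documentation linked in
# the docstring for the exact invocation):
opts = get_cam2rpc_opts(t='pinhole', dem='ref_dem.tif', gsd=0.7, num_samples=50)
cam2rpc_cmd = ['cam2rpc'] + opts + ['img.tif', 'cam.tsai', 'img_rpc.tif']
print(' '.join(cam2rpc_cmd))
# subprocess.call(cam2rpc_cmd)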
Example #4
def main():
    parser = getparser()
    args = parser.parse_args()
    refdem = args.refdem
    srcdem = args.srcdem
    outfolder = '{}__{}_comparison_stats'.format(
        os.path.splitext(os.path.basename(refdem))[0],
        os.path.splitext(os.path.basename(srcdem))[0])
    header_str = '{}__{}'.format(
        os.path.splitext(os.path.basename(refdem))[0],
        os.path.splitext(os.path.basename(srcdem))[0])
    if not os.path.exists(outfolder):
        os.makedirs(outfolder)
    if args.local_ortho == 1:
        temp_ds = warplib.memwarp_multi_fn([refdem, srcdem])[0]
        bbox = geolib.ds_extent(temp_ds)
        geo_crs = temp_ds.GetProjection()
        print('Bounding box lon_lat is {}'.format(bbox))
        bound_poly = Polygon([[bbox[0], bbox[3]], [bbox[2], bbox[3]],
                              [bbox[2], bbox[1]], [bbox[0], bbox[1]]])
        bound_shp = gpd.GeoDataFrame(index=[0],
                                     geometry=[bound_poly],
                                     crs=geo_crs)
        bound_centroid = bound_shp.centroid
        cx = bound_centroid.x.values[0]
        cy = bound_centroid.y.values[0]
        pad = np.ptp([bbox[3], bbox[1]]) / 6.0
        lat_1 = bbox[1] + pad
        lat_2 = bbox[3] - pad
        local_ortho = "+proj=ortho +lat_1={} +lat_2={} +lat_0={} +lon_0={} +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs".format(
            lat_1, lat_2, cy, cx)
        logging.info('Local Ortho projection is {}'.format(local_ortho))
        t_srs = local_ortho
    else:
        t_srs = 'first'
    # this step performs the desired warping operation
    ds_list = warplib.memwarp_multi_fn([refdem, srcdem],
                                       res=args.comparison_res,
                                       t_srs=t_srs)
    refma = iolib.ds_getma(ds_list[0])
    srcma = iolib.ds_getma(ds_list[1])
    init_diff = refma - srcma
    init_stats = malib.get_stats_dict(init_diff)
    print("Original descriptive statistics {}".format(init_stats))
    init_diff_json_fn = os.path.join(
        outfolder, '{}_precoreg_descriptive_stats.json'.format(header_str))
    init_diff_json = json.dumps(init_stats)

    with open(init_diff_json_fn, 'w') as f:
        f.write(init_diff_json)
    logging.info("Saved initial stats at {}".format(init_diff_json))
    refslope = gdaldem(ds_list[0])
    # stats for elevation difference vs reference DEM elevation
    elev_bin, diff_mean, diff_median, diff_std, diff_perc = cummulative_profile(
        refma, init_diff, args.elev_bin_width)
    # stats for elevation difference vs reference DEM slope
    slope_bin, diff_mean_s, diff_median_s, diff_std_s, diff_perc_s = cummulative_profile(
        refslope, init_diff, args.slope_bin_width)
    f, ax = plt.subplots(1, 2, figsize=(10, 4))
    im = ax[0].scatter(elev_bin, diff_mean, c=diff_perc, cmap='inferno')
    ax[0].set_xlabel('Elevation (m)')
    divider = make_axes_locatable(ax[0])
    cax = divider.append_axes('right', size='2.5%', pad=0.05)
    plt.colorbar(im,
                 cax=cax,
                 orientation='vertical',
                 label='pixel count percentage')
    im2 = ax[1].scatter(slope_bin, diff_mean_s, c=diff_perc_s, cmap='inferno')
    ax[1].set_xlabel('Slope (degrees)')
    divider = make_axes_locatable(ax[1])
    cax = divider.append_axes('right', size='2.5%', pad=0.05)
    plt.colorbar(im2,
                 cax=cax,
                 orientation='vertical',
                 label='pixel count percentage')

    for axa in ax.ravel():
        axa.axhline(y=0, c='k')
        axa.set_ylabel('Elevation Difference (m)')
    plt.tight_layout()
    precoreg_plot = os.path.join(outfolder,
                                 header_str + '_precoreg_binned_plot.png')
    f.savefig(precoreg_plot, dpi=300, bbox_inches='tight', pad_inches=0.1)
    logging.info("Saved binned plot at {}".format(precoreg_plot))
    if args.coreg == 1:
        logging.info("will attempt coregisteration")
        if args.local_ortho == 1:
            ref_local_ortho = os.path.splitext(refdem)[0] + '_local_ortho.tif'
            src_local_ortho = os.path.splitext(srcdem)[0] + '_local_ortho.tif'
            # co-registration works best at mean resolution
            # we will rewarp if the initial args.res was not mean
            if args.comparison_res != 'mean':
                ds_list = warplib.memwarp_multi_fn([refdem, srcdem],
                                                   res='mean',
                                                   t_srs=t_srs)
                refma = iolib.ds_getma(ds_list[0])
                srcma = iolib.ds_getma(ds_list[1])
            iolib.writeGTiff(refma, ref_local_ortho, ds_list[0])
            iolib.writeGTiff(srcma, src_local_ortho, ds_list[1])
            coreg_ref = ref_local_ortho
            src_ref = src_local_ortho
        else:
            coreg_ref = refdem
            src_ref = srcdem
        demcoreg_dir = os.path.join(outfolder, 'coreg_results')
        align_opts = [
            '-mode', 'nuth', '-max_iter', '12', '-max_offset', '400',
            '-outdir', demcoreg_dir
        ]
        align_args = [coreg_ref, src_ref]
        align_cmd = ['dem_align.py'] + align_opts + align_args
        subprocess.call(align_cmd)
        # final round of warping and stats calculation
        try:
            srcdem_align = glob.glob(os.path.join(demcoreg_dir,
                                                  '*align.tif'))[0]
            logging.info(
                "Attempting stats calculation for aligned DEM {}".format(
                    srcdem_align))
            ds_list = warplib.memwarp_multi_fn([args.refdem, srcdem_align],
                                               res=args.comparison_res,
                                               t_srs=t_srs)
            refma = iolib.ds_getma(ds_list[0])
            srcma = iolib.ds_getma(ds_list[1])
            # note: recycling variable names to save memory
            init_diff = refma - srcma
            init_stats = malib.get_stats_dict(init_diff)
            print("Final descriptive statistics {}".format(init_stats))
            init_diff_json_fn = os.path.join(
                outfolder,
                '{}_postcoreg_descriptive_stats.json'.format(header_str))
            init_diff_json = json.dumps(init_stats)

            with open(init_diff_json_fn, 'w') as f:
                f.write(init_diff_json)
            logging.info("Saved final stats at {}".format(init_diff_json))
            refslope = gdaldem(ds_list[0])
            # stats for elevation difference vs reference DEM elevation
            elev_bin, diff_mean, diff_median, diff_std, diff_perc = cummulative_profile(
                refma, init_diff, args.elev_bin_width)
            # stats for elevation difference vs reference DEM slope
            slope_bin, diff_mean_s, diff_median_s, diff_std_s, diff_perc_s = cummulative_profile(
                refslope, init_diff, args.slope_bin_width)
            f, ax = plt.subplots(1, 2, figsize=(10, 4))
            im = ax[0].scatter(elev_bin,
                               diff_mean,
                               c=diff_perc,
                               cmap='inferno')
            ax[0].set_xlabel('Elevation (m)')
            divider = make_axes_locatable(ax[0])
            cax = divider.append_axes('right', size='2.5%', pad=0.05)
            plt.colorbar(im,
                         cax=cax,
                         orientation='vertical',
                         label='pixel count percentage')
            im2 = ax[1].scatter(slope_bin,
                                diff_mean_s,
                                c=diff_perc_s,
                                cmap='inferno')
            ax[1].set_xlabel('Slope (degrees)')
            divider = make_axes_locatable(ax[1])
            cax = divider.append_axes('right', size='2.5%', pad=0.05)
            plt.colorbar(im2,
                         cax=cax,
                         orientation='vertical',
                         label='pixel count percentage')

            for axa in ax.ravel():
                axa.axhline(y=0, c='k')
                axa.set_ylabel('Elevation Difference (m)')
            plt.tight_layout()
            precoreg_plot = os.path.join(
                outfolder, header_str + '_postcoreg_binned_plot.png')
            f.savefig(precoreg_plot,
                      dpi=300,
                      bbox_inches='tight',
                      pad_inches=0.1)
        except Exception:
            logging.info(
                "Failed to compute post coreg stats, see corresponding job log"
            )
        logging.info("Script is complete !")
Example #5
def warp_multi(src_ds_list, res='first', extent='intersection', t_srs='first', r='cubic', warptype=memwarp, outdir=None, dst_ndv=None, verbose=True, debug=False):
    """This parses and checks inputs, then calls desired warp function with appropriate arguments for each input ds
    
    Parameters
    ----------
    src_ds_list : list of gdal.Dataset objects
        List of original datasets to be warped
    res : arbitrary type
        Desired output resolution
    extent : arbitrary type
        Desired output extent
    t_srs : arbitrary type
        Desired output spatial reference
    r : str
        Desired resampling algorithm
    warptype : function
        Desired warp type (write to memory or disk)
    outdir : str
        Desired output directory (for disk warp)
    dst_ndv : float
        Desired output NoData Value
    verbose : bool 
        Print warp parameters
    debug : bool 
        Print extra information for debugging purposes

    Returns
    -------
    out_ds_list : list of gdal.Dataset objects
        List of warped datasets (either in memory or on disk)
    """
    #Type cast arguments as str for evaluation
    #Avoid path errors
    #res = str(res)
    #extent = str(extent)
    #t_srs = str(t_srs)

    #Parse the input
    t_srs = parse_srs(t_srs, src_ds_list)
    res = parse_res(res, src_ds_list, t_srs)
    extent = parse_extent(extent, src_ds_list, t_srs)

    if verbose:
        print("\nWarping all inputs to the following:")
        print("Resolution: %s" % res)
        print("Extent: %s" % str(extent))
        print("Projection: '%s'" % t_srs.ExportToProj4())
        print("Resampling alg: %s\n" % r)  

    out_ds_list = []
    for i, ds in enumerate(src_ds_list):
        fn_list = ds.GetFileList()
        fn = '[memory]'
        if fn_list is not None:
            fn = fn_list[0]
        if verbose:
            print("%i of %i: %s" % (i+1, len(src_ds_list), fn))

        #If input srs are different, must warp
        ds_t_srs = geolib.get_ds_srs(ds)
        srscheck = bool(t_srs.IsSame(ds_t_srs))
       
        if debug:
            print('\n%s' % ds_t_srs.ExportToWkt())
            print('%s\n' % t_srs.ExportToWkt())
            print('srscheck: %s\n' % srscheck)

        rescheck = False
        extentcheck = False

        #if srscheck:
        #Extract info from ds to see if warp is necessary
        ds_res = geolib.get_res(ds, square=True)[0]
        ds_extent = geolib.ds_extent(ds)

        #Note: these checks necessary to handle rounding and precision issues
        #Round extent and res to nearest mm
        precision = 1E-3
        #Or if t_srs has units of degrees
        if ds_t_srs.IsGeographic():
            precision = 1E-8

        rescheck = (res is None) or geolib.res_compare(res, ds_res, precision=precision)
        extentcheck = (extent is None) or geolib.extent_compare(extent, ds_extent, precision=precision)

        if debug:
            print('\n%s, %s\n' % (ds_res, res)) 
            print('%s' % ds_extent)
            print('%s\n' % extent) 
            print('rescheck: %s' % rescheck)
            print('extentcheck: %s\n' % extentcheck)

        #If the ds passes all three, it is identical to desired output, short circuit
        if rescheck and extentcheck and srscheck:
            out_ds_list.append(ds)
        else:
            dst_ds = warptype(ds, res, extent, t_srs, r, outdir, dst_ndv=dst_ndv, verbose=verbose)
            out_ds_list.append(dst_ds)

    return out_ds_list
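# Example usage (a sketch: 'dem_2015.tif' and 'dem_2017.tif' are placeholder filenames,
# and iolib is assumed to be imported as in the rest of this module):
src_ds_list = [iolib.fn_getds(fn) for fn in ('dem_2015.tif', 'dem_2017.tif')]
warp_ds_list = warp_multi(src_ds_list, res='min', extent='intersection', t_srs='first', r='cubic')
ma_list = [iolib.ds_getma(ds) for ds in warp_ds_list]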
Example #6
                            ('meltwater', 'total_mmSLEa')]].sum())

#Compile stats for each division
print("\nTotal Gt/a for each aggregation")
print('agg', 'total_Gta', 'mb_mwea')
for i in [glac_df_mb_basin, glac_df_mb_region, glac_df_mb_qdgc]:
    print(i.df_name, i[('mb_mwea', 'total_Gta')].sum(), i['mb_mwea',
                                                          'mean'].mean())

if False:
    fig, ax = plt.subplots()
    ax.set_aspect('equal')
    print("Loading shaded relief map")
    hs_ds = gdal.Open(hs_fn)
    hs = iolib.ds_getma(hs_ds)
    hs_extent = geolib.ds_extent(hs_ds)
    hs_extent_cartopy = cartopy_extent(hs_extent)
    print("Plotting image")
    ax.imshow(hs,
              cmap='gray',
              origin='upper',
              extent=hs_extent_cartopy,
              transform=crs,
              alpha=0.6)
else:
    hs = None
"""
#This is currently broken
import cartopy.feature as cfeature
borders = cfeature.NaturalEarthFeature(category='cultural', name='admin_1_states_provinces_lines', scale='50m', facecolor='none')
ax.add_feature(borders, edgecolor='k')
Example #7
def main(args=None):
    parser = getparser()
    args = parser.parse_args()

    # Should check that files exist
    ref_dem_fn = args.ref_fn
    src_dem_fn = args.src_fn

    mode = args.mode
    mask_list = args.mask_list
    max_offset = args.max_offset
    max_dz = args.max_dz
    slope_lim = tuple(args.slope_lim)
    tiltcorr = args.tiltcorr
    polyorder = args.polyorder
    res = args.res

    # Maximum number of iterations
    max_iter = args.max_iter

    # These are tolerances (in meters) to stop iteration
    tol = args.tol
    min_dx = tol
    min_dy = tol
    min_dz = tol

    outdir = args.outdir
    if outdir is None:
        outdir = os.path.splitext(src_dem_fn)[0] + '_dem_align'

    if tiltcorr:
        outdir += '_tiltcorr'
        tiltcorr_done = False
        # Relax tolerance for initial round of co-registration
        # tiltcorr_tol = 0.1
        # if tol < tiltcorr_tol:
        #    tol = tiltcorr_tol

    if not os.path.exists(outdir):
        os.makedirs(outdir)

    outprefix = '%s_%s' % (os.path.splitext(os.path.split(src_dem_fn)[-1])[0],
                           os.path.splitext(os.path.split(ref_dem_fn)[-1])[0])
    outprefix = os.path.join(outdir, outprefix)

    print("\nReference: %s" % ref_dem_fn)
    print("Source: %s" % src_dem_fn)
    print("Mode: %s" % mode)
    print("Output: %s\n" % outprefix)

    src_dem_ds = gdal.Open(src_dem_fn)
    ref_dem_ds = gdal.Open(ref_dem_fn)

    # Get local cartesian coordinate system
    # local_srs = geolib.localtmerc_ds(src_dem_ds)
    # Use original source dataset coordinate system
    # Potential issues with distortion and xyz/tiltcorr offsets for DEMs with large extents
    local_srs = geolib.get_ds_srs(src_dem_ds)
    # local_srs = geolib.get_ds_srs(ref_dem_ds)

    # Resample to common grid
    ref_dem_res = geolib.get_res(ref_dem_ds, t_srs=local_srs, square=False)
    # Create a copy to be updated in place
    src_dem_ds_align = iolib.mem_drv.CreateCopy('', src_dem_ds, 0)
    src_dem_res = geolib.get_res(src_dem_ds, t_srs=local_srs, square=False)
    src_dem_ds = None
    # Resample to user-specified resolution
    ref_dem_ds, src_dem_ds_align = warplib.memwarp_multi([ref_dem_ds, src_dem_ds_align],
                                                         extent='intersection', res=args.res, t_srs=local_srs,
                                                         r='cubic')

    res = geolib.get_res(src_dem_ds_align, square=False)
    print("\nReference DEM res: %0.2s" % ref_dem_res)
    print("Source DEM res: %0.2s" % src_dem_res)
    print("Resolution for coreg: %s (%0.2s m)\n" % (args.res, res))

    # Iteration number
    n = 1
    # Cumulative offsets
    dx_total = 0
    dy_total = 0
    dz_total = 0

    # Now iteratively update geotransform and vertical shift
    while True:
        print("*** Iteration %i ***" % n)
        dx, dy, dz, static_mask, fig = compute_offset(ref_dem_ds, src_dem_ds_align, src_dem_fn, mode, max_offset,
                                                      mask_list=mask_list, max_dz=max_dz, slope_lim=slope_lim,
                                                      plot=True)
        xyz_shift_str_iter = "dx=%+0.2fm, dy=%+0.2fm, dz=%+0.2fm" % (dx, dy, dz)
        print("Incremental offset: %s" % xyz_shift_str_iter)

        dx_total += dx
        dy_total += dy
        dz_total += dz

        xyz_shift_str_cum = "dx=%+0.2fm, dy=%+0.2fm, dz=%+0.2fm" % (dx_total, dy_total, dz_total)
        print("Cumulative offset: %s" % xyz_shift_str_cum)
        # String to append to output filenames
        xyz_shift_str_cum_fn = '_%s_x%+0.2f_y%+0.2f_z%+0.2f' % (mode, dx_total, dy_total, dz_total)

        # Should make an animation of this converging
        if n == 1:
            # static_mask_orig = static_mask
            if fig is not None:
                dst_fn = outprefix + '_%s_iter%02i_plot.png' % (mode, n)
                print("Writing offset plot: %s" % dst_fn)
                fig.gca().set_title("Incremental: %s\nCumulative: %s" % (xyz_shift_str_iter, xyz_shift_str_cum))
                fig.savefig(dst_fn, dpi=300)

        # Apply the horizontal shift to the original dataset
        src_dem_ds_align = coreglib.apply_xy_shift(src_dem_ds_align, dx, dy, createcopy=False)
        # Apply the vertical shift
        src_dem_ds_align = coreglib.apply_z_shift(src_dem_ds_align, dz, createcopy=False)

        n += 1
        print("\n")
        # If magnitude of shift in all directions is less than tol
        # if n > max_iter or (abs(dx) <= min_dx and abs(dy) <= min_dy and abs(dz) <= min_dz):
        # If magnitude of shift is less than tol
        dm = np.sqrt(dx ** 2 + dy ** 2 + dz ** 2)
        dm_total = np.sqrt(dx_total ** 2 + dy_total ** 2 + dz_total ** 2)

        if dm_total > max_offset:
            sys.exit(
                "Total offset exceeded specified max_offset (%0.2f m). Consider increasing -max_offset argument" %
                max_offset)

        # Stop iteration
        if n > max_iter or dm < tol:

            if fig is not None:
                dst_fn = outprefix + '_%s_iter%02i_plot.png' % (mode, n)
                print("Writing offset plot: %s" % dst_fn)
                fig.gca().set_title("Incremental:%s\nCumulative:%s" % (xyz_shift_str_iter, xyz_shift_str_cum))
                fig.savefig(dst_fn, dpi=300)

            # Compute final elevation difference
            if True:
                ref_dem_clip_ds_align, src_dem_clip_ds_align = warplib.memwarp_multi([ref_dem_ds, src_dem_ds_align],
                                                                                     res=res, extent='intersection',
                                                                                     t_srs=local_srs, r='cubic')
                ref_dem_align = iolib.ds_getma(ref_dem_clip_ds_align, 1)
                src_dem_align = iolib.ds_getma(src_dem_clip_ds_align, 1)
                # ref_dem_clip_ds_align = None

                diff_align = src_dem_align - ref_dem_align
                src_dem_align = None
                ref_dem_align = None

                # Get updated, final mask
                mask_glac = get_mask(src_dem_clip_ds_align, mask_list, src_dem_fn, erode=False)
                # mask_glac_erode = get_mask(src_dem_clip_ds_align, mask_list, src_dem_fn, erode=False)
                mask_glac = np.logical_or(np.ma.getmaskarray(diff_align), mask_glac)

                # Final stats, before outlier removal
                diff_align_compressed = diff_align[~mask_glac]
                diff_align_stats = malib.get_stats_dict(diff_align_compressed, full=True)

                # Prepare filtered version for tiltcorr fit

                # Steep-slope areas within the glacierized region
                slope = get_filtered_slope(src_dem_clip_ds_align, slope_lim=(0.01, 35))
                mask_glac_outlier = np.logical_and(mask_glac, np.ma.getmaskarray(slope))

                diff_glac_outlier = np.ma.array(diff_align, mask=~mask_glac_outlier)
                if diff_glac_outlier.count() > 0:
                    diff_align_glac_outlier = outlier_filter(np.ma.array(diff_align, mask=~mask_glac_outlier), f=2,
                                                             max_dz=100)
                    diff_align_glac_outlier[mask_glac_outlier == False] = diff_align[mask_glac_outlier == False]
                else:
                    diff_align_glac_outlier = np.ma.array(diff_align, mask=None)

                diff_align_filt_nonglac = np.ma.array(diff_align_glac_outlier, mask=mask_glac)
                diff_align_filt_compressed = diff_align[~mask_glac]
                diff_align_filt_nonglac = outlier_filter(diff_align_filt_nonglac, f=3, max_dz=max_dz)

                diff_align_filt_stats = malib.get_stats_dict(diff_align_filt_nonglac, full=True)

                diff_align_filt = np.ma.array(diff_align_filt_nonglac, mask=None)
                diff_align_filt_mask = np.ma.getmaskarray(diff_align_filt_nonglac)

                diff_align_filt[mask_glac == True] = diff_align_glac_outlier[mask_glac == True]

            # Fit 2D polynomial to residuals and remove
            # To do: add support for along-track and cross-track artifacts
            if tiltcorr and not tiltcorr_done:
                print("\n************")
                print("Calculating 'tiltcorr' 2D polynomial fit to residuals with order %i" % polyorder)
                print("************\n")
                gt = src_dem_clip_ds_align.GetGeoTransform()

                # Need to apply the mask here, so we're only fitting over static surfaces
                # Note that the origmask=False will compute vals for all x and y indices, which is what we want
                vals, resid, coeff = geolib.ma_fitpoly(diff_align_filt_nonglac, order=polyorder, gt=gt, perc=(2, 98),
                                                       origmask=False)
                # vals, resid, coeff = geolib.ma_fitplane(diff_align_filt, gt, perc=(12.5, 87.5), origmask=False)

                # Should write out coeff or grid with correction

                vals_stats = malib.get_stats_dict(vals)

                # Want to have max_tilt check here
                # max_tilt = 4.0 #m
                # Should do percentage
                # vals.ptp() > max_tilt

                # Note: dimensions of ds and vals will be different as vals are computed for clipped intersection
                # Need to recompute planar offset for full src_dem_ds_align extent and apply
                xgrid, ygrid = geolib.get_xy_grids(src_dem_ds_align)
                valgrid = geolib.polyval2d(xgrid, ygrid, coeff)
                # For results of ma_fitplane
                # valgrid = coeff[0]*xgrid + coeff[1]*ygrid + coeff[2]
                src_dem_ds_align = coreglib.apply_z_shift(src_dem_ds_align, -valgrid, createcopy=False)

                # if True:
                #     print("Creating plot of polynomial fit to residuals")
                #     fig, axa = plt.subplots(1,2, figsize=(8, 4))
                #     dz_clim = malib.calcperc_sym(vals, (2, 98))
                #     ax = pltlib.iv(diff_align_filt_nonglac, ax=axa[0], cmap='RdBu', clim=dz_clim, \
                #             label='Residual dz (m)', scalebar=False)
                #     ax = pltlib.iv(valgrid, ax=axa[1], cmap='RdBu', clim=dz_clim, \
                #             label='Polyfit dz (m)', ds=src_dem_ds_align)
                #     #if tiltcorr:
                #         #xyz_shift_str_cum_fn += "_tiltcorr"
                #     tiltcorr_fig_fn = outprefix + '%s_polyfit.png' % xyz_shift_str_cum_fn
                #     print("Writing out figure: %s\n" % tiltcorr_fig_fn)
                #     fig.savefig(tiltcorr_fig_fn, dpi=300)

                print("Applying tilt correction to difference map")
                diff_align -= vals

                # Should iterate until tilts are below some threshold
                # For now, only do one tiltcorr
                tiltcorr_done = True
                # Now use original tolerance, and number of iterations
                tol = args.tol
                max_iter = n + args.max_iter
            else:
                break

    if True:
        # Write out aligned difference map for clipped extent with vertical offset removed
        align_diff_fn = outprefix + '%s_align_diff.tif' % xyz_shift_str_cum_fn
        print("Writing out aligned difference map with median vertical offset removed")
        iolib.writeGTiff(diff_align, align_diff_fn, src_dem_clip_ds_align)

    if True:
        # Write out filtered aligned difference map
        align_diff_filt_fn = outprefix + '%s_align_diff_filt.tif' % xyz_shift_str_cum_fn
        print("Writing out filtered aligned difference map with median vertical offset removed")
        iolib.writeGTiff(diff_align_filt, align_diff_filt_fn, src_dem_clip_ds_align)

    # Extract final center coordinates for intersection
    center_coord_ll = geolib.get_center(src_dem_clip_ds_align, t_srs=geolib.wgs_srs)
    center_coord_xy = geolib.get_center(src_dem_clip_ds_align)
    src_dem_clip_ds_align = None

    # Write out final aligned src_dem
    align_fn = outprefix + '%s_align.tif' % xyz_shift_str_cum_fn
    print("Writing out shifted src_dem with median vertical offset removed: %s" % align_fn)
    # Open original uncorrected dataset at native resolution
    src_dem_ds = gdal.Open(src_dem_fn)
    src_dem_ds_align = iolib.mem_drv.CreateCopy('', src_dem_ds, 0)
    # Apply final horizontal and vertical shift to the original dataset
    # Note: potential issues if we used a different projection during coregistration!
    src_dem_ds_align = coreglib.apply_xy_shift(src_dem_ds_align, dx_total, dy_total, createcopy=False)
    src_dem_ds_align = coreglib.apply_z_shift(src_dem_ds_align, dz_total, createcopy=False)
    if tiltcorr:
        xgrid, ygrid = geolib.get_xy_grids(src_dem_ds_align)
        valgrid = geolib.polyval2d(xgrid, ygrid, coeff)
        # For results of ma_fitplane
        # valgrid = coeff[0]*xgrid + coeff[1]*ygrid + coeff[2]
        src_dem_ds_align = coreglib.apply_z_shift(src_dem_ds_align, -valgrid, createcopy=False)
    # Might be cleaner way to write out MEM ds directly to disk
    src_dem_full_align = iolib.ds_getma(src_dem_ds_align)
    iolib.writeGTiff(src_dem_full_align, align_fn, src_dem_ds_align)

    if True:
        # Output final aligned src_dem, masked so only best pixels are preserved
        # Useful if creating a new reference product
        # Can also use apply_mask.py
        print("Applying filter to shifted src_dem")
        align_diff_filt_full_ds = \
            warplib.memwarp_multi_fn([align_diff_filt_fn, ], res=src_dem_ds_align, extent=src_dem_ds_align,
                                     t_srs=src_dem_ds_align)[0]
        align_diff_filt_full = iolib.ds_getma(align_diff_filt_full_ds)
        align_diff_filt_full_ds = None
        align_fn_masked = outprefix + '%s_align_filt.tif' % xyz_shift_str_cum_fn
        iolib.writeGTiff(np.ma.array(src_dem_full_align, mask=np.ma.getmaskarray(align_diff_filt_full)),
                         align_fn_masked, src_dem_ds_align)

    del src_dem_full_align
    del src_dem_ds_align

    # Compute original elevation difference
    if True:
        ref_dem_clip_ds, src_dem_clip_ds = warplib.memwarp_multi([ref_dem_ds, src_dem_ds],
                                                                 res=res, extent='intersection', t_srs=local_srs,
                                                                 r='cubic')
        # src_dem_ds = None
        ref_dem_ds = None
        ref_dem_orig = iolib.ds_getma(ref_dem_clip_ds)
        src_dem_orig = iolib.ds_getma(src_dem_clip_ds)
        # Needed for plotting
        ref_dem_hs = geolib.gdaldem_mem_ds(ref_dem_clip_ds, processing='hillshade', returnma=True, computeEdges=True)
        src_dem_hs = geolib.gdaldem_mem_ds(src_dem_clip_ds, processing='hillshade', returnma=True, computeEdges=True)
        diff_orig = src_dem_orig - ref_dem_orig
        # Only compute stats over valid surfaces
        static_mask_orig = get_mask(src_dem_clip_ds, mask_list, src_dem_fn)
        # Note: this doesn't include outlier removal or slope mask!
        static_mask_orig = np.logical_or(np.ma.getmaskarray(diff_orig), static_mask_orig)
        # For some reason, ASTER DEM diff have a spike near the 0 bin, could be an issue with masking?
        diff_orig_compressed = diff_orig[~static_mask_orig]
        diff_orig_stats = malib.get_stats_dict(diff_orig_compressed, full=True)

        # Prepare filtered version for comparison
        diff_orig_filt = np.ma.array(diff_orig, mask=static_mask_orig)
        diff_orig_filt = outlier_filter(diff_orig_filt, f=3, max_dz=max_dz)
        # diff_orig_filt = outlier_filter(diff_orig_filt, perc=(12.5, 87.5), max_dz=max_dz)
        slope = get_filtered_slope(src_dem_clip_ds)
        diff_orig_filt = np.ma.array(diff_orig_filt, mask=np.ma.getmaskarray(slope))
        diff_orig_filt_stats = malib.get_stats_dict(diff_orig_filt, full=True)

        # Write out original difference map
        print("Writing out original difference map for common intersection before alignment")
        orig_diff_fn = outprefix + '_orig_diff.tif'
        iolib.writeGTiff(diff_orig, orig_diff_fn, ref_dem_clip_ds)
        # src_dem_clip_ds = None
        ref_dem_clip_ds = None

    if True:
        align_stats_fn = outprefix + '%s_align_stats.json' % xyz_shift_str_cum_fn
        align_stats = {}
        align_stats['src_fn'] = src_dem_fn
        align_stats['ref_fn'] = ref_dem_fn
        align_stats['align_fn'] = align_fn
        align_stats['res'] = {}
        align_stats['res']['src'] = src_dem_res
        align_stats['res']['ref'] = ref_dem_res
        align_stats['res']['coreg'] = res
        align_stats['center_coord'] = {'lon': center_coord_ll[0], 'lat': center_coord_ll[1],
                                       'x': center_coord_xy[0], 'y': center_coord_xy[1]}
        align_stats['shift'] = {'dx': dx_total, 'dy': dy_total, 'dz': dz_total, 'dm': dm_total}
        # This tiltcorr flag gets set to false, need better flag
        if tiltcorr:
            align_stats['tiltcorr'] = {}
            align_stats['tiltcorr']['coeff'] = coeff.tolist()
            align_stats['tiltcorr']['val_stats'] = vals_stats
        align_stats['before'] = diff_orig_stats
        align_stats['before_filt'] = diff_orig_filt_stats
        align_stats['after'] = diff_align_stats
        align_stats['after_filt'] = diff_align_filt_stats

        import json
        with open(align_stats_fn, 'w') as f:
            json.dump(align_stats, f)

    # Create output plot
    if True:
        datadir = iolib.get_datadir()
        shp_fn = os.path.join(datadir, 'gamdam/gamdam_merge_refine_line.shp')
        shp_ds = ogr.Open(shp_fn)
        lyr = shp_ds.GetLayer()
        lyr_srs = lyr.GetSpatialRef()
        shp_extent = geolib.lyr_extent(lyr)
        ds_extent = geolib.ds_extent(src_dem_ds, t_srs=lyr_srs)
        if geolib.extent_compare(shp_extent, ds_extent) is False:
            ext = '_n' + str(int(center_coord_ll[0])) + '_n' + str(int(center_coord_ll[1])).zfill(3)
            # ext = os.path.splitext(os.path.split(ref_dem_fn)[-1])[0][4:13]
            out_fn = os.path.splitext(shp_fn)[0] + ext + '_clip.shp'
            geolib.clip_shp(shp_fn, extent=ds_extent, out_fn=out_fn)
            shp_fn = out_fn

        print("Creating final plot")
        # f, axa = plt.subplots(2, 4, figsize=(11, 8.5))
        f, axa = plt.subplots(2, 4, figsize=(16, 8))
        # for ax in axa.ravel()[:-1]:
        #     ax.set_facecolor('w')
        #     pltlib.hide_ticks(ax)
        dem_clim = malib.calcperc(ref_dem_orig, (2, 98))
        axa[0, 0].imshow(ref_dem_hs, cmap='gray')
        im = axa[0, 0].imshow(ref_dem_orig, cmap='terrain', clim=dem_clim, alpha=0.6)
        pltlib.add_cbar(axa[0, 0], im, arr=ref_dem_orig, clim=dem_clim, label=None)
        pltlib.add_scalebar(axa[0, 0], res=res[0])
        axa[0, 0].set_title('Reference DEM')
        axa[0, 0].set_facecolor('w')
        pltlib.hide_ticks(axa[0, 0])
        # pltlib.shp_overlay(axa[0,0], src_dem_clip_ds, shp_fn, color='k')

        axa[0, 1].imshow(src_dem_hs, cmap='gray')
        im = axa[0, 1].imshow(src_dem_orig, cmap='terrain', clim=dem_clim, alpha=0.6)
        pltlib.add_cbar(axa[0, 1], im, arr=src_dem_orig, clim=dem_clim, label=None)
        axa[0, 1].set_title('Source DEM')
        axa[0, 1].set_facecolor('w')
        pltlib.hide_ticks(axa[0, 1])
        # pltlib.shp_overlay(axa[0,1], src_dem_clip_ds, shp_fn, color='k')
        # axa[0,2].imshow(~static_mask_orig, clim=(0,1), cmap='gray')
        axa[0, 2].imshow(~mask_glac, clim=(0, 1), cmap='gray')
        axa[0, 2].set_title('Surfaces for co-registration')
        axa[0, 2].set_facecolor('w')
        pltlib.hide_ticks(axa[0, 2])

        dz_clim = malib.calcperc_sym(diff_align_filt[mask_glac], (1, 99))
        dz_clim_noglac = malib.calcperc_sym(diff_orig_compressed, (1, 99))

        # dz_clim = (-10, 10)
        # dz_clim_noglac = (-10, 10)

        # axa[0,3].imshow(~static_mask_gla, clim=(0,1), cmap='gray')
        # axa[0,3].set_title('static_mask_gla2')
        # # dz_clim = malib.calcperc_sym(diff_orig_compressed, (1, 99))
        bins = np.linspace(dz_clim_noglac[0], dz_clim_noglac[1], 256)
        # bins = np.linspace(-50, 50, 256)
        axa[0, 3].hist(diff_orig_compressed, bins, color='b', label='Before', alpha=0.5)
        # axa[1,3].hist(diff_align_compressed, bins, color='g', label='After', alpha=0.5)
        axa[0, 3].hist(diff_align_filt_compressed, bins, color='g', label='Filter', alpha=0.5)
        # axa[0, 3].set_xlim(*dz_clim_noglac)
        axa[0, 3].set_xlim(-50, 50)
        axa[0, 3].axvline(0, color='k', linewidth=0.5, linestyle=':')
        axa[0, 3].set_xlabel('Elev. Diff. (m)')
        axa[0, 3].set_ylabel('Count (px)')
        axa[0, 3].set_title("Source - Reference")
        before_str = 'Before\nmed: %0.2f\nnmad: %0.2f' % (diff_orig_stats['med'], diff_orig_stats['nmad'])
        axa[0, 3].text(0.05, 0.95, before_str, va='top', color='b', transform=axa[0, 3].transAxes, fontsize=8)
        # after_str = 'After\nmed: %0.2f\nnmad: %0.2f' % (diff_align_stats['med'], diff_align_stats['nmad'])
        # axa[1,3].text(0.05, 0.65, after_str, va='top', color='g', transform=axa[1,3].transAxes, fontsize=8)
        filt_str = 'Filter\nmed: %0.2f\nnmad: %0.2f' % (diff_align_filt_stats['med'], diff_align_filt_stats['nmad'])
        axa[0, 3].text(0.65, 0.95, filt_str, va='top', color='g', transform=axa[0, 3].transAxes, fontsize=8)

        axa[1, 0].imshow(ref_dem_hs, cmap='gray')
        im = axa[1, 0].imshow(diff_orig, cmap='cpt_rainbow_r', clim=dz_clim, alpha=0.6)
        pltlib.add_cbar(axa[1, 0], im, arr=diff_orig, clim=dz_clim, label=None)
        axa[1, 0].set_title('Elev. Diff. Before (m)')
        axa[1, 0].set_facecolor('w')
        pltlib.hide_ticks(axa[1, 0])
        # pltlib.shp_overlay(axa[1,0], src_dem_clip_ds, shp_fn, color='k')

        axa[1, 1].imshow(ref_dem_hs, cmap='gray')
        im = axa[1, 1].imshow(diff_align, cmap='cpt_rainbow_r', clim=dz_clim, alpha=0.6)
        pltlib.add_cbar(axa[1, 1], im, arr=diff_align, clim=dz_clim, label=None)
        axa[1, 1].set_title('Elev. Diff. After (m)')
        axa[1, 1].set_facecolor('w')
        pltlib.hide_ticks(axa[1, 1])
        # pltlib.shp_overlay(axa[1,1], src_dem_clip_ds, shp_fn, color='k')

        # tight_dz_clim = (-1.0, 1.0)
        # tight_dz_clim = (-10.0, 10.0)
        # tight_dz_clim = malib.calcperc_sym(diff_align_filt, (5, 95))
        # im = axa[1,2].imshow(diff_align_filt, cmap='cpt_rainbow', clim=tight_dz_clim)
        # pltlib.add_cbar(axa[1,2], im, arr=diff_align_filt, clim=tight_dz_clim, label=None)
        # axa[1,2].set_title('Elev. Diff. Remove. Outliers (m)')
        axa[1, 2].imshow(ref_dem_hs, cmap='gray')
        im = axa[1, 2].imshow(diff_align_filt, cmap='cpt_rainbow_r', clim=dz_clim, alpha=0.6)
        pltlib.add_cbar(axa[1, 2], im, arr=diff_align_filt, clim=dz_clim, label=None)
        axa[1, 2].set_title('Elev. Diff. Remove. Outliers (m)')
        axa[1, 2].set_facecolor('w')
        pltlib.hide_ticks(axa[1, 2])
        # pltlib.shp_overlay(axa[1,2], src_dem_clip_ds, shp_fn, color='k')

        tight_dz_clim = (-10, 10)
        axa[1, 3].imshow(ref_dem_hs, cmap='gray')
        im = axa[1, 3].imshow(diff_align_filt_nonglac, cmap='cpt_rainbow_r', clim=tight_dz_clim, alpha=0.6)
        pltlib.add_cbar(axa[1, 3], im, arr=diff_align_filt_nonglac, clim=tight_dz_clim, label=None)
        axa[1, 3].set_title('Elev. Diff. NoGlac (m)')
        axa[1, 3].set_facecolor('w')
        pltlib.hide_ticks(axa[1, 3])

        # Tried to insert Nuth fig here
        # ax_nuth.change_geometry(1,2,1)
        # f.axes.append(ax_nuth)

        suptitle = '%s\nx: %+0.2fm, y: %+0.2fm, z: %+0.2fm' % (
            os.path.split(outprefix)[-1], dx_total, dy_total, dz_total)
        f.suptitle(suptitle)
        f.tight_layout()
        plt.subplots_adjust(top=0.90)

        fig_fn = outprefix + '%s_align.png' % xyz_shift_str_cum_fn
        print("Writing out figure: %s" % fig_fn)
        f.savefig(fig_fn, dpi=450)

        if True:
            fig2 = plt.figure(0)
            ax = fig2.add_subplot(1, 1, 1)
            ax.imshow(ref_dem_hs, cmap='gray')
            im = ax.imshow(diff_align_filt, cmap='cpt_rainbow_r', clim=dz_clim, alpha=0.6)
            pltlib.add_cbar(ax, im, arr=diff_align_filt, clim=dz_clim, label=None)
            ax.set_title('cLon: %0.1fE    cLat: %0.1fN\n\nElev. Diff. After. Coreg. (m)' % (
                center_coord_ll[0], center_coord_ll[1]))
            ax.set_facecolor('w')
            pltlib.hide_ticks(ax)
            # pltlib.latlon_ticks(ax, lat_in=0.25, lon_in=0.25, in_crs=local_srs.ExportToProj4())
            pltlib.shp_overlay(ax, src_dem_clip_ds, shp_fn, color='k')

            fig2_fn = outprefix + '_align_diff.png'
            fig2.savefig(fig2_fn, dpi=600, bbox_inches='tight', pad_inches=0.1)
Example #8
    glas_pts = np.loadtxt(glas_csv_fn, delimiter=',', skiprows=1, dtype=None)
    print("Saving npz: %s" % glas_npz_fn)
    np.savez_compressed(glas_npz_fn, glas_pts)
else:
    #This takes ~5 seconds to load ~9M records with 8 fields
    print("Loading npz: %s" % glas_npz_fn)
    glas_pts = np.load(glas_npz_fn)['arr_0']

dem_fn_list = sys.argv[1:]
for n,dem_fn in enumerate(dem_fn_list):
    print("%i of %i" % (n+1, len(dem_fn_list)))
    #Lat/lon extent filter
    print("Loading DEM: %s" % dem_fn)
    dem_ds = gdal.Open(dem_fn)
    dem_ma = iolib.ds_getma(dem_ds)
    dem_extent_wgs84 = geolib.ds_extent(dem_ds, t_srs=pt_srs)
    xmin, ymin, xmax, ymax = dem_extent_wgs84
    print("Applying spatial filter") 
    x = glas_pts[:,xcol]
    y = glas_pts[:,ycol]
    idx = ((x >= xmin) & (x <= xmax) & (y >= ymin) & (y <= ymax)) 
    if idx.nonzero()[0].size == 0:
        print("No points after spatial filter")
        continue

    print("Sampling DEM at masked point locations") 
    glas_pts_fltr = glas_pts[idx]

    print("Writing out %i points after spatial filter" % glas_pts_fltr.shape[0]) 
    out_csv_fn = os.path.splitext(dem_fn)[0]+'_%s.csv' % ext
Example #9
def crop_sim_res_extent(img_list, outfol, vrt=False, rpc=False):
    """
    Warp images to common 'finest' resolution and intersecting extent
    This is useful for stereo processing of mapprojected SkySat image pairs

    Parameters
    ----------
    img_list: list
        list containing two images
    outfol: str
        path to folder where warped images will be saved
    vrt: bool
        Produce warped VRT instead of geotiffs if True
    rpc: bool
        Copy RPC information to warped images if True
    Returns
    ----------
    out: list
        list containing the two warped images; the first entry (left image) is the image that initially had the finer resolution (more nadir)
        If the images do not intersect, None is returned instead
    """
    resample_alg = 'lanczos'
    img1 = img_list[0]
    img2 = img_list[1]
    img1_ds = iolib.fn_getds(img1)
    img2_ds = iolib.fn_getds(img2)
    res1 = geolib.get_res(img1_ds, square=True)[0]
    res2 = geolib.get_res(img2_ds, square=True)[0]
    # set left image as higher resolution, this is repeated for video, but
    # good for triplet with no gsd information
    if res1 < res2:
        l_img = img1
        r_img = img2
        res = res1
    else:
        l_img = img2
        r_img = img1
        res = res2
    # ASP stereo commands expect .tif/.tiff inputs and complain about .vrt
    # Workaround idea: save with the VRT driver but use a .tif extension?
    l_img_warp = os.path.join(
        outfol,
        os.path.splitext(os.path.basename(l_img))[0] + '_warp.tif')
    r_img_warp = os.path.join(
        outfol,
        os.path.splitext(os.path.basename(r_img))[0] + '_warp.tif')
    if not (os.path.exists(l_img_warp)):
        # can turn on verbose during qa/qc
        # Better to turn off during large runs, writing takes time
        verbose = False
        if not os.path.exists(outfol):
            os.makedirs(outfol)
        try:
            #this will fail gracefully (returning None) if the images do not intersect
            ds_list = warplib.memwarp_multi_fn([l_img, r_img],
                                               r=resample_alg,
                                               verbose=verbose,
                                               res='min',
                                               extent='intersection')
            if vrt:
                extent = geolib.ds_extent(ds_list[0])
                res = geolib.get_res(ds_list[0], square=True)
                vrt_options = gdal.BuildVRTOptions(resampleAlg='average',
                                                   resolution='user',
                                                   xRes=res[0],
                                                   yRes=res[1],
                                                   outputBounds=tuple(extent))
                l_vrt = gdal.BuildVRT(l_img_warp, [
                    l_img,
                ],
                                      options=vrt_options)
                r_vrt = gdal.BuildVRT(r_img_warp, [
                    r_img,
                ],
                                      options=vrt_options)
                # close vrt to save to disk
                l_vrt = None
                r_vrt = None
                out = [l_img_warp, r_img_warp]
            else:
                # Opting out of writing VRTs here to avoid correlation artifacts;
                # GeoTiffs will be written out instead
                l_img_ma = iolib.ds_getma(ds_list[0])
                r_img_ma = iolib.ds_getma(ds_list[1])
                iolib.writeGTiff(l_img_ma, l_img_warp, ds_list[0])
                iolib.writeGTiff(r_img_ma, r_img_warp, ds_list[1])
                out = [l_img_warp, r_img_warp]
                del (ds_list)
                if rpc:
                    copy_rpc(l_img, l_img_warp)
                    copy_rpc(r_img, r_img_warp)
        except BaseException:
            out = None
    else:
        out = [l_img_warp, r_img_warp]
    return out
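# Example usage (a sketch: image filenames and the output folder are placeholders):
out = crop_sim_res_extent(['img_nadir.tif', 'img_offnadir.tif'], 'warped_pairs', vrt=False, rpc=True)
if out is not None:
    l_img_warp, r_img_warp = out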
Example #10
        description=desc_str,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-t_srs',
                        type=str,
                        default=None,
                        help='Proj4 string for desired output coordinates')
    parser.add_argument('-pad', type=float, default=None, \
            help='Width of padding to be applied to extent (meters, or units of specified t_srs)')
    parser.add_argument('fn', type=str, help='Raster filename')
    return parser


parser = getparser()
args = parser.parse_args()

ds = gdal.Open(args.fn)

t_srs = None
if args.t_srs is not None:
    t_srs = warplib.parse_srs(args.t_srs)

extent = geolib.ds_extent(ds, t_srs=t_srs)
#extent = geolib.ds_geom_extent(ds)

#Pad by desired amount
if args.pad is not None:
    extent = geolib.pad_extent(extent, width=args.pad)

#Print to stdout
print(' '.join(map(str, extent)))
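# Example invocation (a sketch: the script filename and input raster are placeholders):
#   python get_extent.py -t_srs '+proj=longlat +datum=WGS84 +no_defs' -pad 100 dem.tif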