def update(self):
    """Update the FRED reference vector with the ArcticDEM tile index.

    Fetches the remote index zip, reprojects it to WGS84, merges the
    per-feature regions into one footprint geometry, and registers an
    ARCTICDEM-1 survey record in FRED if one does not already exist.
    """
    self.FRED._open_ds()
    zip_fn = os.path.basename(self._arctic_dem_index_url)
    status = f_utils.Fetch(
        self._arctic_dem_index_url, verbose=self.verbose
    ).fetch_file(zip_fn)
    shp_files = utils.p_unzip(zip_fn, ['shp', 'shx', 'dbf', 'prj'])
    index_shp = None
    for fn in shp_files:
        if '.shp' in fn:
            index_shp = fn
    # reproject the index to WGS84 for storage in FRED
    utils.run_cmd(
        'ogr2ogr arctic_tmp.shp {} -t_srs epsg:4326'.format(index_shp),
        verbose=True
    )
    utils.remove_glob(zip_fn, *shp_files)
    index_shp = 'arctic_tmp.shp'
    shp_files = ['arctic_tmp.shp', 'arctic_tmp.dbf', 'arctic_tmp.shx', 'arctic_tmp.prj']
    # merge all feature regions into a single bounding region
    shp_regions = regions.gdal_ogr_regions(index_shp)
    shp_region = regions.Region()
    for this_region in shp_regions:
        if shp_region.valid_p(check_xy=True):
            shp_region = regions.regions_merge(shp_region, this_region)
        else:
            shp_region = this_region
    geom = shp_region.export_as_geom()
    self.FRED._attribute_filter(["ID = '{}'".format('ARCTICDEM-1')])
    if self.FRED.layer is None or len(self.FRED.layer) == 0:
        self.FRED._add_survey(
            Name='ArcticDEM',
            ID='ARCTICDEM-1',
            Agency='UMN',
            Date=utils.this_year(),
            MetadataLink='https://data.pgc.umn.edu/elev/dem/setsm/ArcticDEM/',
            MetadataDate=utils.this_year(),
            DataLink=self._arctic_dem_index_url,
            IndexLink=self._arctic_dem_index_url,
            DataType='raster',
            DataSource='arcticdem',
            Info='Arctic Only',
            geom=geom
        )
    utils.remove_glob(*shp_files)
    self.FRED._close_ds()
def _gmt_select_split(self, o_xyz, sub_region, sub_bn, verbose=False):
    """Split an xyz file into an inner and outer region with gmt gmtselect.

    Args:
      o_xyz (str): a pathname to an xyz file
      sub_region (list): a region list [xmin, xmax, ymin, ymax]
      sub_bn (str): a basename for the selected data
      verbose (bool): increase verbosity

    Returns:
      list: [inner_region, outer_region] — either entry is None when
      the corresponding gmtselect invocation fails.

    TODO: update for GDAL.
    """
    inner_fn = None
    outer_fn = None
    region_str = sub_region.format('gmt')
    # points inside the sub-region
    out, status = utils.run_cmd(
        'gmt gmtselect -V {} {} > {}_inner.xyz'.format(o_xyz, region_str, sub_bn),
        verbose=verbose
    )
    if status == 0:
        inner_fn = '{}_inner.xyz'.format(sub_bn)
    # points outside the sub-region (-Ir inverts the selection)
    out, status = utils.run_cmd(
        'gmt gmtselect -V {} {} -Ir > {}_outer.xyz'.format(o_xyz, region_str, sub_bn),
        verbose=verbose
    )
    if status == 0:
        outer_fn = '{}_outer.xyz'.format(sub_bn)
    return([inner_fn, outer_fn])
def _load_background(self):
    """GSHHG/GMRT - Global low-res.

    Used to fill un-set cells of the coast mask: water cells get 0,
    land cells get 1, matching the fill loop below.
    """
    if self.gc['GMT'] is not None and not self.want_gmrt:
        # NOTE(review): original read `wg['gc']['GMT']` but `wg` is not
        # defined in this scope; `self.gc` / `self.inc` match usage in
        # the sibling methods — confirm against the enclosing class.
        utils.run_cmd(
            'gmt grdlandmask {} -I{} -r -Df -G{}=gd:GTiff -V -N1/0/1/0/1'.format(
                self.p_region.format('gmt'), self.inc, self.g_mask),
            verbose=self.verbose)
    else:
        this_gmrt = gmrt.GMRT(
            src_region=self.p_region, weight=self.weight,
            verbose=self.verbose, layer='topo-mask').run()
        this_gmrt.fetch_results()
        # fixed: this assignment was commented out in the original,
        # leaving `gmrt_tif` undefined (NameError) on this branch.
        # TODO(review): confirm the layout of `results` entries.
        gmrt_tif = this_gmrt.results[0]
        utils.run_cmd(
            'gdalwarp {} {} -tr {} {} -overwrite'.format(
                gmrt_tif, self.g_mask, self.inc, self.inc),
            verbose=True)
        #utils.remove_glob(gmrt_tif)

    ## ==============================================
    ## update wet/dry mask with gsshg/gmrt data
    ## speed up!
    ## ==============================================
    utils.echo_msg('filling the coast mask with gsshg/gmrt data...')
    c_ds = gdal.Open(self.g_mask)
    c_ds_arr = c_ds.GetRasterBand(1).ReadAsArray()
    for this_xyz in gdalfun.gdal_parse(c_ds):
        xpos, ypos = utils._geo2pixel(this_xyz.x, this_xyz.y, self.dst_gt)
        # only fill cells still holding the nodata value
        try:
            if self.coast_array[ypos, xpos] == self.ds_config['ndv']:
                if this_xyz.z == 1:
                    self.coast_array[ypos, xpos] = 0
                elif this_xyz.z == 0:
                    self.coast_array[ypos, xpos] = 1
        except IndexError:
            # mask pixel falls outside the coast array; skip it
            pass
    c_ds = None
    utils.remove_glob('{}*'.format(self.g_mask))
def _write_coast_poly(self):
    """Convert the coast_array raster into a coastline polygon vector."""
    tmp_name = 'tmp_c_{}'.format(self.name)
    tmp_ds = ogr.GetDriverByName('ESRI Shapefile').CreateDataSource('{}.shp'.format(tmp_name))
    if tmp_ds is not None:
        tmp_layer = tmp_ds.CreateLayer(tmp_name, None, ogr.wkbMultiPolygon)
        tmp_layer.CreateField(ogr.FieldDefn('DN', ogr.OFTInteger))
        demfun.polygonize('{}.tif'.format(self.name), tmp_layer, verbose=self.verbose)
        tmp_ds = None
    # keep the 8 largest water (DN=0) polygons
    utils.run_cmd(
        'ogr2ogr -dialect SQLITE -sql "SELECT * FROM tmp_c_{} WHERE DN=0 order by ST_AREA(geometry) desc limit 8" {}.shp tmp_c_{}.shp'.format(
            self.name, self.name, self.name),
        verbose=True
    )
    utils.remove_glob('tmp_c_{}.*'.format(self.name))
    # repair any invalid geometries in place
    utils.run_cmd(
        'ogrinfo -dialect SQLITE -sql "UPDATE {} SET geometry = ST_MakeValid(geometry)" {}.shp'.format(
            self.name, self.name)
    )
def vdatum_xyz(self, xyz):
    """Run vdatum on an xyz list [x, y, z].

    Returns the transformed xyz list. If no jar can be located, the
    input is returned unchanged; if the vdatum output contains no
    'Height/Z' line, the input z is passed through unchanged (the
    original left `z` unbound in that case, raising NameError).
    """
    if self.jar is None:
        self.vdatum_locate_jar()
    if self.jar is not None:
        vdc = 'ihorz:{} ivert:{} ohorz:{} overt:{} -nodata -pt:{},{},{} {}region:{}'.format(
            self.ihorz, self.ivert, self.ohorz, self.overt,
            xyz[0], xyz[1], xyz[2],
            'epoch:{} '.format(self.epoch) if self.epoch is not None else '',
            self.region)
        out, status = utils.run_cmd(
            'java -Djava.awt.headless=false -jar {} {}'.format(self.jar, vdc),
            verbose=False)
        # default to the input z so a missing 'Height/Z' line in the
        # vdatum output cannot raise NameError
        z = xyz[2]
        for i in out.split('\n'):
            if 'Height/Z' in i:
                z = float(i.split()[2])
                break
        return([xyz[0], xyz[1], z])
    else:
        return(xyz)
def run(self):
    """Generate spatial-metadata polygons for each named datalist entry.

    For every data list, masks its xyz data into a per-list raster,
    polygonizes the coverage, unions the polygons into one feature and
    appends it (with the metadata fields) to self.layer.
    """
    for xdl in self.data:
        for dl_name in xdl.data_lists.keys():
            xdl.data_entries = xdl.data_lists[dl_name]
            o_v_fields = [
                dl_name, 'Unknown', '0', 'xyz_elevation',
                'Unknown', 'WGS84', 'NAVD88', 'URL'
            ]
            defn = None if self.layer is None else self.layer.GetLayerDefn()
            # NOTE(review): `sm_inc` is not defined in this scope — confirm
            # where the mask increment is supposed to come from.
            [x for x in xdl.mask_xyz('{}.tif'.format(dl_name), sm_inc)]
            if demfun.infos('{}.tif'.format(dl_name), scan=True)['zr'][1] == 1:
                tmp_ds = ogr.GetDriverByName('ESRI Shapefile').CreateDataSource(
                    '{}_poly.shp'.format(dl_name))
                if tmp_ds is not None:
                    tmp_layer = tmp_ds.CreateLayer(
                        '{}_poly'.format(dl_name), None, ogr.wkbMultiPolygon)
                    tmp_layer.CreateField(ogr.FieldDefn('DN', ogr.OFTInteger))
                    demfun.polygonize(
                        '{}.tif'.format(dl_name), tmp_layer, verbose=self.verbose)
                    if len(tmp_layer) > 1:
                        if defn is None:
                            defn = tmp_layer.GetLayerDefn()
                        out_feat = gdal_ogr_mask_union(tmp_layer, 'DN', defn)
                        [out_feat.SetField(f, o_v_fields[i])
                         for i, f in enumerate(self.v_fields)]
                        self.layer.CreateFeature(out_feat)
                tmp_ds = None
                utils.remove_glob('{}_poly.*'.format(dl_name), 'tmp.tif')
    self.ds = None
    # NOTE(review): `dr` is not defined in this scope — confirm the
    # intended spatial-filter region.
    utils.run_cmd(
        'ogrinfo -spat {} -dialect SQLITE -sql "UPDATE {} SET geometry = ST_MakeValid(geometry)" {}'.format(
            dr.format('ul_lr'), self.dst_layer, self.dst_vector))
def _burn_region(self):
    """Wet/dry datalist mask or burn region.

    With data entries present, generate a 'num' mask from the data;
    otherwise rasterize (burn) the buffered region outline.

    Fixes vs. original: the condition `len(self.data) < 0` could never
    be true, `c_region` was unbound in the else branch, and an
    undefined `**kwargs` was passed to WafflesNum.
    """
    c_region = self.d_region
    if len(self.data) > 0:
        # clamp z to [-1, 1] for the mask grid
        c_region.zmin = -1
        c_region.zmax = 1
        waffles.WafflesNum(
            data=self.data, src_region=c_region, inc=self.inc,
            name=self.w_name, extend=self.extend,
            extend_proc=self.extend_proc, weights=self.weights,
            sample=self.sample, clip=self.clip, epsg=self.epsg,
            verbose=self.verbose, mode='w'
        ).run()
    else:
        c_region.export_as_ogr('region_buff.shp')
        xsize, ysize, gt = c_region.geo_transform(x_inc=self.inc)
        utils.run_cmd(
            'gdal_rasterize -ts {} {} -te {} -burn -9999 -a_nodata -9999 '
            '-ot Int32 -co COMPRESS=DEFLATE -a_srs EPSG:{} region_buff.shp {}'.format(
                xsize, ysize, c_region.format('te'), self.epsg, self.w_mask),
            verbose=self.verbose)
def extract_region(self, in_ogr, out_ogr=None):
    """Clip in_ogr to this object's region; returns the output pathname."""
    if out_ogr is None:
        out_ogr = '{}_{}.shp'.format(
            '.'.join(in_ogr.split('.')[:-1]), self.region.format('fn'))
    clip_cmd = 'ogr2ogr {} {} -clipsrc {} -nlt POLYGON -skipfailures'.format(
        out_ogr, in_ogr, self.region.format('ul_lr'))
    out, status = utils.run_cmd(clip_cmd, verbose=True)
    return(out_ogr)
def _cdn_transform(self, epsg=None, name=None, invert=False):
    """Create a CDN transformation grid.

    Searches the proj CDN for a transformation grid covering
    self.src_region, warps it onto the target grid dimensions and
    returns (array, source_epsg_code). On failure, returns a zero
    array and the requested epsg.
    """
    if epsg is not None:
        cdn_results = cudem.fetches.vdatum.search_proj_cdn(
            self.src_region, epsg=epsg, cache_dir=self.cache_dir)
    else:
        cdn_results = cudem.fetches.vdatum.search_proj_cdn(
            self.src_region, cache_dir=self.cache_dir)
    # prefer a geoid result when one is present (last match wins,
    # matching the original behavior)
    for _result in cdn_results:
        for g in _geoids:
            if g in _result['name']:
                cdn_results = [_result]
    if len(cdn_results) > 0:
        for _result in cdn_results:
            src_code = int(_result['source_crs_code'].split(':')[-1])
            dst_code = int(_result['target_crs_code'].split(':')[-1])
            if epsg == dst_code or np.any([g in _result['name'] for g in _geoids]):
                if src_code in _htdp_reference_frames.keys():
                    _trans_grid = os.path.join(self.cache_dir, _result['name'])
                    if cudem.fetches.utils.Fetch(
                            _result['url'], verbose=self.verbose
                    ).fetch_file(_trans_grid) == 0:
                        tmp_infos = demfun.infos(_trans_grid)
                        tmp_region = regions.Region().from_geo_transform(
                            tmp_infos['geoT'], tmp_infos['nx'], tmp_infos['ny'])
                        warped = '_{}'.format(os.path.basename(_trans_grid))
                        if os.path.exists(warped):
                            utils.remove_glob(warped)
                        # resample the grid onto the output dimensions
                        utils.run_cmd(
                            'gdalwarp {} {} -s_srs epsg:4326 -te {} -ts {} {} --config CENTER_LONG 0'.format(
                                _trans_grid, warped,
                                self.src_region.format('te'),
                                self.xcount, self.ycount),
                            verbose=True)
                        _tmp_array, _tmp_infos = demfun.get_array(warped)
                        utils.remove_glob(warped)
                        if invert:
                            _tmp_array = _tmp_array * -1
                        return(_tmp_array, src_code)
    utils.echo_error_msg('failed to locate transformation for {}'.format(epsg))
    return(np.zeros((self.ycount, self.xcount)), epsg)
def yield_xyz(self, entry):
    """Fetch a gtx zip entry, warp it to a GeoTIFF and yield its xyz points."""
    src_zip = entry[1]
    src_gtx = entry[2]
    src_tif = '{}.tif'.format(utils.fn_basename(src_zip, 'zip'))
    fetched = f_utils.Fetch(
        entry[0], callback=self.callback, verbose=self.verbose
    ).fetch_file(src_zip)
    if fetched == 0:
        v_gtxs = utils.p_f_unzip(src_zip, [src_gtx])
        # recenter on longitude 0 while converting to GeoTIFF
        utils.run_cmd('gdalwarp {} {} --config CENTER_LONG 0'.format(src_gtx, src_tif))
        _ds = datasets.RasterFile(
            fn=src_tif,
            data_format=200,
            dst_srs=self.dst_srs,
            src_srs=None,
            src_region=self.region,
            verbose=self.verbose
        )
        for xyz in _ds.yield_xyz():
            yield(xyz)
        utils.remove_glob(*v_gtxs, src_tif, src_zip)
def run(self):
    """Search the ArcticDEM tile index for tiles intersecting self.region.

    Appends [url, local-path, data-type] entries to self.results for
    every index feature whose geometry intersects the search region.
    """
    print(self.arctic_region)
    for surv in FRED._filter_FRED(self):
        zip_fn = os.path.basename(self._arctic_dem_index_url)
        status = f_utils.Fetch(
            self._arctic_dem_index_url, verbose=self.verbose
        ).fetch_file(zip_fn)
        shp_files = utils.p_unzip(zip_fn, ['shp', 'shx', 'dbf', 'prj'])
        index_shp = None
        for fn in shp_files:
            if fn.split('.')[-1] == 'shp':
                index_shp = fn
                break
        utils.run_cmd(
            'ogr2ogr arctic_tmp.shp {} -t_srs epsg:4326'.format(index_shp),
            verbose=True)
        utils.remove_glob(zip_fn, *shp_files)
        index_shp = 'arctic_tmp.shp'
        shp_files = ['arctic_tmp.shp', 'arctic_tmp.dbf', 'arctic_tmp.shx', 'arctic_tmp.prj']
        try:
            v_ds = ogr.Open(index_shp)
        except:
            v_ds = None
            status = -1
        if v_ds is not None:
            layer = v_ds.GetLayer()
            try:
                # NOTE(review): `name` is undefined here; the resulting
                # NameError is swallowed by this except — confirm which
                # value the attribute filter was meant to use.
                self.FRED.layer.SetAttributeFilter("Name = '{}'".format(name))
            except:
                pass
            fcount = layer.GetFeatureCount()
            for f in range(0, fcount):
                feature = layer[f]
                geom = feature.GetGeometryRef()
                if geom.Intersects(self.region.export_as_geom()):
                    data_link = feature.GetField('fileurl')
                    self.results.append([
                        data_link,
                        os.path.join(self._outdir, data_link.split('/')[-1]),
                        surv['DataType']
                    ])
        utils.remove_glob(*shp_files)
    return(self)
def vdatum_get_version(self):
    """Run vdatum and attempt to get its version.

    Returns the vdatum version string, or None when the jar cannot be
    located or no version line is found in the output.
    """
    if self.jar is None:
        self.vdatum_locate_jar()
    if self.jar is None:
        return(None)
    out, status = utils.run_cmd(
        'java -jar {} {}'.format(self.jar, '-'), verbose=self.verbose)
    # NOTE(review): other vdatum helpers in this file treat run_cmd
    # output as str; the decode here implies bytes — confirm which
    # run_cmd variant this class uses.
    for line in out.decode('utf-8').split('\n'):
        if '- v' in line.strip():
            return(line.strip().split('v')[-1])
    return(None)
def acquire(self):
    """Dispatch self.mod to the matching module CLI entry point.

    Falls back to running the mod arguments as a shell command when
    self.mod is a registered external module; returns None otherwise.
    """
    cli_dispatch = {
        'dlim': dlim.datalists_cli,
        'waffles': waffles.waffles_cli,
        'regions': regions.regions_cli,
        'spatial_metadata': metadata.spat_meta_cli,
        'uncertainties': uncertainties.uncertainties_cli,
        'fetches': fetches.fetches_cli,
    }
    if self.mod in cli_dispatch:
        return(cli_dispatch[self.mod](self.mod_args))
    if self.mod in self.mods.keys():
        return(utils.run_cmd(' '.join(self.mod_args), verbose=True))
def run_vdatum(self, src_fn):
    """Run vdatum on src_fn, which is an XYZ file.

    Uses the instance's vdatum configuration (horizontal/vertical
    datums, delimiter, column layout, skip count, epoch, region).

    Returns [command-output, command-return-code]; ([], -1) when no
    vdatum jar can be located.
    """
    if self.jar is None:
        self.vdatum_locate_jar()
    if self.jar is None:
        return([], -1)
    vdc = 'ihorz:{} ivert:{} ohorz:{} overt:{} -nodata -file:txt:{},{},skip{}:{}:{} {}region:{}'.format(
        self.ihorz, self.ivert, self.ohorz, self.overt,
        self.delim, self.xyzl, self.skip, src_fn, self.result_dir,
        'epoch:{} '.format(self.epoch) if self.epoch is not None else '',
        self.region)
    return(utils.run_cmd(
        'java -jar {} {}'.format(self.jar, vdc), verbose=self.verbose))
def run(self):
    """Run the INTERPOLATION uncertainty module.

    Analyzes the DEM's data mask and proximity grid, chunks the region
    into sub-regions, selects training tiles by density zone, runs
    split-sample simulations and applies the resulting error
    coefficient to the proximity grid.

    Returns (unc_out, 0) where unc_out maps output names to
    [path, type] pairs.
    """
    s_dp = s_ds = None
    unc_out = {}
    zones = ['low-dens', 'mid-dens', 'high-dens', 'low-slp', 'mid-slp', 'high-slp']
    utils.echo_msg(
        'running INTERPOLATION uncertainty module using {}...'.format(self.dem.mod))
    if self.prox is None:
        self._gen_prox()

    ## ==============================================
    ## region and der. analysis
    ## ==============================================
    self.region_info = {}
    msk_ds = gdal.Open(self.dem.mask_fn)
    num_sum, g_max, num_perc = self._mask_analysis(msk_ds)
    msk_ds = None
    self.prox_percentile = demfun.percentile(self.prox, self.percentile)
    self.prox_perc_33 = demfun.percentile(self.prox, 25)
    self.prox_perc_66 = demfun.percentile(self.prox, 75)
    self.prox_perc_100 = demfun.percentile(self.prox, 100)
    self.region_info[self.dem.name] = [
        self.dem.region, g_max, num_sum, num_perc, self.prox_percentile]
    for rname in self.region_info.keys():
        utils.echo_msg('region: {}: {}'.format(rname, self.region_info[rname]))

    ## ==============================================
    ## chunk region into sub regions
    ## ==============================================
    chnk_inc = int(
        (self.region_info[self.dem.name][1] / math.sqrt(g_max))
        / self.region_info[self.dem.name][3])
    sub_regions = self.dem.region.chunk(self.dem.inc, chnk_inc)
    utils.echo_msg('chunked region into {} sub-regions @ {}x{} cells.'.format(
        len(sub_regions), chnk_inc, chnk_inc))

    ## ==============================================
    ## sub-region analysis
    ## ==============================================
    sub_zones = self._sub_region_analysis(sub_regions)

    ## ==============================================
    ## sub-region density and percentiles
    ## ==============================================
    s_dens = np.array([sub_zones[k][3] for k in sub_zones.keys()])
    s_5perc = np.percentile(s_dens, 5)
    s_dens = None
    utils.echo_msg('Sampling density for region is: {:.16f}'.format(s_5perc))

    ## ==============================================
    ## zone analysis / generate training regions
    ## ==============================================
    trainers = []
    t_perc = 95
    s_perc = 50
    for z, this_zone in enumerate(self._zones):
        tile_set = [sub_zones[k] for k in sub_zones.keys()
                    if sub_zones[k][5] == self._zones[z]]
        if len(tile_set) > 0:
            d_50perc = np.percentile(np.array([t[3] for t in tile_set]), 50)
        else:
            continue
        # training tiles are those at or below the zone's median density
        t_trainers = [t for t in tile_set
                      if t[3] < d_50perc or abs(t[3] - d_50perc) < 0.01]
        utils.echo_msg('possible {} training zones: {} @ MAX {}'.format(
            self._zones[z].upper(), len(t_trainers), d_50perc))
        trainers.append(t_trainers)
    utils.echo_msg('sorting training tiles by distance...')
    trains = self._regions_sort(trainers, verbose=False)
    tot_trains = len([t for s in trains for t in s])
    utils.echo_msg('sorted sub-regions into {} training tiles.'.format(tot_trains))
    utils.echo_msg('analyzed {} sub-regions.'.format(len(sub_regions)))

    ## ==============================================
    ## split-sample simulations and error calculations
    ## sims = max-simulations
    ## ==============================================
    if self.sims is None:
        self.sims = int(len(sub_regions) / tot_trains)
    ec_d = self._split_sample(trains, s_5perc)[0]

    ## ==============================================
    ## Save/Output results
    ## apply error coefficient to full proximity grid
    ## TODO: USE numpy/gdal instead!
    ## ==============================================
    utils.echo_msg('applying coefficient to PROXIMITY grid')
    if self.dem.gc['GMT'] is None:
        utils.run_cmd(
            'gdal_calc.py -A {} --outfile {}_prox_unc.tif --calc "{}+({}*(A**{}))"'.format(
                self.prox, self.dem.name, 0, ec_d[1], ec_d[2]),
            verbose=True)
    else:
        math_cmd = 'gmt grdmath {} 0 AND ABS {} POW {} MUL {} ADD = {}_prox_unc.tif=gd+n-9999:GTiff'.format(
            self.prox, ec_d[2], ec_d[1], 0, self.dem.name)
        utils.run_cmd(math_cmd, verbose=self.dem.verbose)
    if self.dem.epsg is not None:
        status = demfun.set_epsg(
            '{}_prox_unc.tif'.format(self.dem.name), epsg=self.dem.epsg)
    utils.echo_msg('applied coefficient {} to PROXIMITY grid'.format(ec_d))
    utils.remove_glob(self.prox)
    unc_out['prox_unc'] = ['{}_prox_unc.tif'.format(self.dem.name), 'raster']
    unc_out['prox_bf'] = ['{}_prox_bf.png'.format(self.dem.name), 'image']
    unc_out['prox_scatter'] = ['{}_prox_scatter.png'.format(self.dem.name), 'image']
    return(unc_out, 0)
def run(self, htdp_control):
    """Feed the given control file to the htdp binary via stdin."""
    htdp_cmd = '{} < {}'.format(self.htdp_bin, htdp_control)
    utils.run_cmd(htdp_cmd, verbose=self.verbose)
def yield_xyz(self, entry):
    """Fetch a multibeam survey entry, extract xyz with mblist and yield points.

    Weighting: when self.process is set, derive a weight from the
    survey's quality percentage and date; otherwise use self.weight.
    Optionally block-medians the output when self.inc is set. Failed
    conversions are logged to a per-region .err file.
    """
    src_data = os.path.basename(entry[1])
    src_mb = src_data[:-4]
    try:
        survey, src_data, mb_fmt, mb_perc, mb_date = self.parse_entry_inf(entry)
    except TypeError:
        return
    this_inf = self.parse_entry_inf(entry)
    fetched = f_utils.Fetch(
        entry[0], callback=self.callback, verbose=self.verbose
    ).fetch_file(src_data)
    if fetched == 0:
        src_xyz = os.path.basename(src_data) + '.xyz'
        if not self.process:
            this_weight = self.weight
            out, status = utils.run_cmd(
                'mblist -OXYZ -I{} -M{} > {}'.format(
                    src_data,
                    'X{}'.format(self.exclude) if self.exclude is not None else 'A',
                    src_xyz),
                verbose=True)
        else:
            # scale the survey's quality percentage by its age
            this_year = int(utils.this_year()) if self.min_year is None else self.min_year
            this_weight = float(mb_perc) * ((int(mb_date) - 2000) / (this_year - 2000)) / 100.
            if this_weight <= 0.:
                this_weight = 0.0000001
            out, status = utils.run_cmd(
                'mblist -OXYZ -I{} -MX{} > {}'.format(
                    src_data, str(100 - float(mb_perc)), src_xyz),
                verbose=True)
        if status != 0:
            # retry with the format parsed from the survey's .inf file
            inf_fetched = f_utils.Fetch(
                '{}.inf'.format(entry[0]), callback=self.callback,
                verbose=self.verbose
            ).fetch_file('{}.inf'.format(src_mb))
            if inf_fetched == 0:
                mb_fmt = self.mb_inf_data_format('{}.inf'.format(src_mb))
                mb_date = self.mb_inf_data_date('{}.inf'.format(src_mb))
                out, status = utils.run_cmd(
                    'mblist -F{} -OXYZ -I{} -MX{} > {}'.format(
                        mb_fmt, src_data, str(100 - float(mb_perc)), src_xyz),
                    verbose=True)
        if status == 0:
            _ds = datasets.XYZFile(
                fn=src_xyz,
                delim='\t',
                data_format=168,
                src_srs='epsg:4326',
                dst_srs=self.dst_srs,
                src_region=self.region,
                verbose=self.verbose,
                weight=this_weight,
                remote=True
            )
            if self.inc is not None:
                # thin through gmt blockmedian at the requested increment
                xyz_func = lambda p: _ds.dump_xyz(dst_port=p, encode=True)
                for xyz in utils.yield_cmd(
                        'gmt blockmedian -I{:.10f} {} -r -V'.format(
                            self.inc, self.region.format('gmt')),
                        verbose=self.verbose,
                        data_fun=xyz_func):
                    yield(xyzfun.XYZPoint().from_list(
                        [float(x) for x in xyz.split()]))
            else:
                for xyz in _ds.yield_xyz():
                    yield(xyz)
            utils.remove_glob(
                src_data, '{}*'.format(src_xyz), '{}*.inf'.format(src_mb))
        else:
            utils.echo_error_msg(
                'failed to process local file, {} [{}]...'.format(src_data, entry[0]))
            err_path = os.path.join(
                self._outdir,
                'fetch_{}_{}.err'.format(self.name, self.region.format('fn')))
            with open('{}'.format(err_path), 'a') as mb_err:
                mb_err.write('{}\n'.format(','.join([src_mb, entry[0]])))
            os.rename(src_data, os.path.join(self._outdir, src_data))
            utils.remove_glob(src_xyz)
    else:
        utils.echo_error_msg(
            'failed to fetch remote file, {}...'.format(src_data))
def main():
    """CLI entry point: convert a grid between vertical datums.

    Parses -i/-o (input/output vertical datum), -D/--cache-dir, -k,
    --list-epsg, --verbose and positional src/dst grid paths, then
    builds a transformation grid with vdatums.VerticalTransform and
    applies it to the source grid via gdalwarp + gdal_calc.py.

    Fixes vs. original: `str(vdatum_out.replace(...))` raised
    AttributeError when vdatum_out kept its integer default — the
    str() conversion must come first; the attached-value `-D<path>`
    branch read argv[i + 1] (the next token) instead of the attached
    value.
    """
    src_grid = None
    dst_grid = None
    vdatum_in = 5703
    vdatum_out = 7662
    verbose = False
    keep_cache = False
    cache_dir = utils.cudem_cache
    i = 1
    argv = sys.argv
    while i < len(argv):
        arg = argv[i]
        if arg == '-i' or arg == '--vdatum_in':
            vdatum_in = argv[i + 1]
            i = i + 1
        elif arg == '-o' or arg == '--vdatum_out':
            vdatum_out = argv[i + 1]
            i = i + 1
        elif arg == '--cache-dir' or arg == '-D' or arg == '-cache-dir':
            cache_dir = os.path.join(
                utils.str_or(argv[i + 1], os.path.expanduser('~')),
                '.cudem_cache')
            i = i + 1
        elif arg[:2] == '-D':
            # attached-value form, e.g. -D/some/path; fixed to read the
            # attached value rather than the next argv token
            cache_dir = os.path.join(
                utils.str_or(arg[2:], os.path.expanduser('~')),
                '.cudem_cache')
        elif arg == '--list-epsg':
            print(_epsg_desc('htdp epsg', vdatums._htdp_reference_frames))
            print(_epsg_desc('cdn espg', vdatums._cdn_reference_frames))
            print(_epsg_desc('tidal epsg', vdatums._tidal_frames))
            sys.exit(1)
        elif arg == '-k' or arg == '--keep-cache':
            keep_cache = True
        elif arg == '--verbose':
            verbose = True
        elif arg == '-help' or arg == '--help' or arg == '-h':
            print(_usage)
            sys.exit(1)
        elif arg == '-version' or arg == '--version':
            print('vertical_datum_convert.py, version {}'.format(_version))
            sys.exit(1)
        elif src_grid is None:
            src_grid = arg
        elif dst_grid is None:
            dst_grid = arg
        else:
            sys.stderr.write(_usage)
            sys.exit(1)
        i = i + 1
    if src_grid is None:
        sys.stderr.write(_usage)
        sys.exit(1)
    if dst_grid is None:
        # fixed: convert vdatum_out to str BEFORE .replace(), since the
        # default value is an int
        dst_grid = ('.'.join(src_grid.split('.')[:-1])
                    + '_'
                    + str(vdatum_out).replace('(', '_').replace(')', '_')
                    + '.'
                    + src_grid.split('.')[-1])
    if not os.path.exists(src_grid):
        utils.echo_error_msg('Error: {} is not a valid file'.format(src_grid))
    else:
        src_infos = demfun.infos(src_grid)
        src_region = regions.Region().from_geo_transform(
            src_infos['geoT'], src_infos['nx'], src_infos['ny'])
        src_region.src_srs = demfun.get_srs(src_grid)
        src_region.warp()
        x_inc, y_inc = src_region.increments(src_infos['nx'], src_infos['ny'])
        # coarser increments for the transformation grid itself
        tmp_x_inc, tmp_y_inc = src_region.increments(
            src_infos['nx'] / 10, src_infos['ny'] / 10)
        vt = vdatums.VerticalTransform(
            src_region, tmp_x_inc, tmp_y_inc, vdatum_in, vdatum_out,
            cache_dir=cache_dir)
        _trans_grid = vt.run()
        if _trans_grid is not None:
            utils.run_cmd(
                'gdalwarp {} {} -ts {} {} -s_srs epsg:4326 -t_srs {}'.format(
                    _trans_grid, '_{}'.format(_trans_grid),
                    src_infos['nx'], src_infos['ny'],
                    demfun.get_srs(src_grid)),
                verbose=True)
            utils.run_cmd(
                'gdal_calc.py -A {} -B {} --calc "A+B" --outfile {}'.format(
                    src_grid.replace(' ', '\ '),
                    '_{}'.format(_trans_grid).replace(' ', '\ '),
                    dst_grid.replace(' ', '\ ')),
                verbose=True)
            utils.remove_glob(_trans_grid, '_{}'.format(_trans_grid))
        else:
            utils.echo_error_msg(
                'could not parse input/output vertical datums: {} -> {}; check spelling, etc'.format(
                    vdatum_in, vdatum_out))
    if not keep_cache:
        utils.remove_glob(cache_dir)
def _load_nhd(self):
    """USGS NHD (HIGH-RES U.S. Only)

    Fetch NHD (NHD High/Plus) data from TNM to fill in near-shore
    areas. High resolution data varies by location...

    Fixes vs. original: `utils.remove_glob(gbd)` referenced an
    undefined name (typo for `gdb`, NameError); the NHDArea clip used
    `regions.region_format(wg['region'], ...)` with undefined `wg` —
    now uses self.p_region like the sibling clips.
    """
    self.p_region.export_as_ogr('region_buff.shp')
    xsize, ysize, gt = self.p_region.geo_transform(x_inc=self.inc)
    utils.run_cmd(
        'gdal_rasterize -ts {} {} -te {} -burn -9999 -a_nodata -9999 '
        '-ot Int32 -co COMPRESS=DEFLATE -a_srs EPSG:{} region_buff.shp {}'.format(
            xsize, ysize, self.p_region.format('te'), self.epsg, self.u_mask),
        verbose=self.verbose)
    utils.remove_glob('region_buff.*')
    this_tnm = fetches.tnm.TheNationalMap(
        src_region=self.p_region, weight=self.weight, verbose=self.verbose,
        where="Name LIKE '%Hydro%'",
        extents='HU-4 Subregion,HU-8 Subbasin').run()
    r_shp = []
    for result in this_tnm.results:
        if f_utils.Fetch(result[0], verbose=self.verbose).fetch_file(
                os.path.join(result[2], result[1])) == 0:
            gdb_zip = os.path.join(result[2], result[1])
            gdb_files = utils.unzip(gdb_zip)
            gdb_bn = os.path.basename('.'.join(gdb_zip.split('.')[:-1]))
            gdb = gdb_bn + '.gdb'
            # clip each water layer of interest to the proc region
            utils.run_cmd(
                'ogr2ogr {}_NHDArea.shp {} NHDArea -clipdst {} -overwrite 2>&1'.format(
                    gdb_bn, gdb, self.p_region.format('ul_lr')),
                verbose=False)
            if os.path.exists('{}_NHDArea.shp'.format(gdb_bn)):
                r_shp.append('{}_NHDArea.shp'.format(gdb_bn))
            utils.run_cmd(
                'ogr2ogr {}_NHDPlusBurnWaterBody.shp {} NHDPlusBurnWaterBody -clipdst {} -overwrite 2>&1'.format(
                    gdb_bn, gdb, self.p_region.format('ul_lr')),
                verbose=False)
            if os.path.exists('{}_NHDPlusBurnWaterBody.shp'.format(gdb_bn)):
                r_shp.append('{}_NHDPlusBurnWaterBody.shp'.format(gdb_bn))
            utils.run_cmd(
                'ogr2ogr {}_NHDWaterBody.shp {} NHDWaterBody -where "FType = 390" -clipdst {} -overwrite 2>&1'.format(
                    gdb_bn, gdb, self.p_region.format('ul_lr')),
                verbose=False)
            if os.path.exists('{}_NHDWaterBody.shp'.format(gdb_bn)):
                r_shp.append('{}_NHDWaterBody.shp'.format(gdb_bn))
            # fixed typo: was remove_glob(gbd)
            utils.remove_glob(gdb)
        else:
            utils.echo_error_msg('unable to fetch {}'.format(result))
    [utils.run_cmd(
        'ogr2ogr -skipfailures -update -append nhdArea_merge.shp {} 2>&1'.format(shp),
        verbose=False) for shp in r_shp]
    utils.run_cmd(
        'gdal_rasterize -burn 1 nhdArea_merge.shp {}'.format(self.u_mask),
        verbose=True)
    utils.remove_glob('nhdArea_merge.*', 'NHD_*', *r_shp)

    ## ==============================================
    ## update wet/dry mask with nhd data
    ## ==============================================
    utils.echo_msg('filling the coast mask with NHD data...')
    c_ds = gdal.Open(self.u_mask)
    c_ds_arr = c_ds.GetRasterBand(1).ReadAsArray()
    for this_xyz in demfun.parse(c_ds):
        xpos, ypos = utils._geo2pixel(this_xyz.x, this_xyz.y, self.dst_gt)
        # only fill cells still holding the nodata value
        try:
            if self.coast_array[ypos, xpos] == self.ds_config['ndv']:
                if this_xyz.z == 1:
                    self.coast_array[ypos, xpos] = 0
        except IndexError:
            # mask pixel falls outside the coast array; skip it
            pass
    c_ds = None
    utils.remove_glob('{}*'.format(self.u_mask))