def yield_xyz(self, entry):
    src_data = 'gmrt_tmp.tif'
    if f_utils.Fetch(
            entry[0], callback=self.callback, verbose=self.verbose
    ).fetch_file(src_data) == 0:
        gmrt_ds = datasets.RasterFile(
            fn=src_data,
            data_format=200,
            src_srs='epsg:4326',
            dst_srs=self.dst_srs,
            weight=.25,
            src_region=self.region,
            verbose=self.verbose
        )
        if self.bathy_only:
            for xyz in gmrt_ds.yield_xyz():
                if xyz.z < 0:
                    yield(xyz)
        else:
            for xyz in gmrt_ds.yield_xyz():
                yield(xyz)
    else:
        utils.echo_error_msg(
            'failed to fetch remote file, {}...'.format(src_data))

    utils.remove_glob('{}*'.format(src_data))
def update(self):
    self.FRED._open_ds()
    v_zip = os.path.basename(self._arctic_dem_index_url)
    status = f_utils.Fetch(
        self._arctic_dem_index_url, verbose=self.verbose
    ).fetch_file(v_zip)
    v_shps = utils.p_unzip(v_zip, ['shp', 'shx', 'dbf', 'prj'])
    v_shp = None
    for v in v_shps:
        if '.shp' in v:
            v_shp = v

    ## reproject the index to WGS84 before scanning
    utils.run_cmd(
        'ogr2ogr arctic_tmp.shp {} -t_srs epsg:4326'.format(v_shp),
        verbose=True
    )
    utils.remove_glob(v_zip, *v_shps)
    v_shp = 'arctic_tmp.shp'
    v_shps = ['arctic_tmp.shp', 'arctic_tmp.dbf', 'arctic_tmp.shx', 'arctic_tmp.prj']
    shp_regions = regions.gdal_ogr_regions(v_shp)
    shp_region = regions.Region()
    for this_region in shp_regions:
        if shp_region.valid_p(check_xy=True):
            shp_region = regions.regions_merge(shp_region, this_region)
        else:
            shp_region = this_region

    geom = shp_region.export_as_geom()
    self.FRED._attribute_filter(["ID = '{}'".format('ARCTICDEM-1')])
    if self.FRED.layer is None or len(self.FRED.layer) == 0:
        self.FRED._add_survey(
            Name='ArcticDEM',
            ID='ARCTICDEM-1',
            Agency='UMN',
            Date=utils.this_year(),
            MetadataLink='https://data.pgc.umn.edu/elev/dem/setsm/ArcticDEM/',
            MetadataDate=utils.this_year(),
            DataLink=self._arctic_dem_index_url,
            IndexLink=self._arctic_dem_index_url,
            DataType='raster',
            DataSource='arcticdem',
            Info='Arctic Only',
            geom=geom
        )

    utils.remove_glob(*v_shps)
    self.FRED._close_ds()
def yield_xyz(self, entry): """process ngs monuments""" src_data = 'ngs_tmp.json' src_csv = 'ngs_tmp.csv' if f_utils.Fetch(entry[0], callback=self.callback, verbose=self.verbose).fetch_file(src_data) == 0: with open(src_data, 'r') as json_file: r = json.load(json_file) if len(r) > 0: for row in r: z = utils.float_or(row[self.datum]) if z is not None: xyz = xyzfun.XYZPoint(src_srs='epsg:4326').from_list( [float(row['lon']), float(row['lat']), z]) if self.dst_srs is not None: xyz.warp(dst_srs=self.dst_srs) yield (xyz) else: utils.echo_error_msg( 'failed to fetch remote file, {}...'.format(src_data)) utils.remove_glob('{}*'.format(src_data))
def update(self):
    self.FRED._open_ds()
    v_zip = os.path.basename(self._hrdem_footprints_url)
    status = f_utils.Fetch(
        self._hrdem_footprints_url, verbose=self.verbose
    ).fetch_ftp_file(v_zip)
    v_shps = utils.p_unzip(v_zip, ['shp', 'shx', 'dbf', 'prj'])
    v_shp = None
    for v in v_shps:
        if '.shp' in v:
            v_shp = v

    shp_regions = regions.gdal_ogr_regions(v_shp)
    shp_region = regions.Region()
    for this_region in shp_regions:
        if shp_region.valid_p(check_xy=True):
            shp_region = regions.regions_merge(shp_region, this_region)
        else:
            shp_region = this_region

    geom = shp_region.export_as_geom()
    self.FRED._attribute_filter(["ID = '{}'".format('HRDEM-1')])
    if self.FRED.layer is None or len(self.FRED.layer) == 0:
        self.FRED._add_survey(
            Name='High-Resolution DEM (Canada)',
            ID='HRDEM-1',
            Agency='NRCAN',
            Date=utils.this_year(),
            MetadataLink=self._hrdem_info_url,
            MetadataDate=utils.this_year(),
            DataLink=self._hrdem_footprints_url,
            IndexLink=self._hrdem_footprints_url,
            DataType='raster',
            DataSource='hrdem',
            Info='Canada Only',
            geom=geom
        )

    utils.remove_glob(v_zip, *v_shps)
    self.FRED._close_ds()
def _load_background(self):
    """GSHHG/GMRT - Global low-res

    Used to fill un-set cells.
    """
    if self.gc['GMT'] is not None and not self.want_gmrt:
        utils.run_cmd(
            'gmt grdlandmask {} -I{} -r -Df -G{}=gd:GTiff -V -N1/0/1/0/1'.format(
                self.p_region.format('gmt'), self.inc, self.g_mask),
            verbose=self.verbose
        )
    else:
        this_gmrt = gmrt.GMRT(
            src_region=self.p_region, weight=self.weight,
            verbose=self.verbose, layer='topo-mask'
        ).run()
        this_gmrt.fetch_results()
        ## first fetched GMRT result, assumed to be the local GeoTIFF path
        gmrt_tif = this_gmrt.results[0]
        utils.run_cmd(
            'gdalwarp {} {} -tr {} {} -overwrite'.format(
                gmrt_tif, self.g_mask, self.inc, self.inc),
            verbose=True
        )

    ## ==============================================
    ## update wet/dry mask with gsshg/gmrt data
    ## TODO: speed this up!
    ## ==============================================
    utils.echo_msg('filling the coast mask with gsshg/gmrt data...')
    c_ds = gdal.Open(self.g_mask)
    for this_xyz in gdalfun.gdal_parse(c_ds):
        xpos, ypos = utils._geo2pixel(this_xyz.x, this_xyz.y, self.dst_gt)
        try:
            if self.coast_array[ypos, xpos] == self.ds_config['ndv']:
                if this_xyz.z == 1:
                    self.coast_array[ypos, xpos] = 0
                elif this_xyz.z == 0:
                    self.coast_array[ypos, xpos] = 1
        except IndexError:
            pass

    c_ds = None
    utils.remove_glob('{}*'.format(self.g_mask))
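## For reference, a minimal sketch of the geographic->pixel mapping that
## utils._geo2pixel performs above, assuming a north-up GDAL geotransform
## `gt` with no rotation terms (this helper is illustrative, not part of
## the original module):
def _geo2pixel_sketch(geo_x, geo_y, gt):
    """convert geographic coordinates to pixel offsets via geotransform `gt`"""
    pixel_x = int((geo_x - gt[0]) / gt[1])  # gt[0]: origin x, gt[1]: pixel width
    pixel_y = int((geo_y - gt[3]) / gt[5])  # gt[3]: origin y, gt[5]: pixel height (negative)
    return(pixel_x, pixel_y)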
def yield_xyz(self, entry):
    src_data = 'mar_grav_tmp.xyz'
    if f_utils.Fetch(
            entry[0], callback=self.callback, verbose=self.verbose,
            verify=False
    ).fetch_file(src_data) == 0:
        _ds = datasets.XYZFile(
            fn=src_data,
            data_format=168,
            skip=1,
            x_offset=-360,
            src_srs='epsg:4326',
            dst_srs=self.dst_srs,
            src_region=self.region,
            verbose=self.verbose,
            remote=True
        )
        for xyz in _ds.yield_xyz():
            yield(xyz)
    else:
        utils.echo_error_msg(
            'failed to fetch remote file, {}...'.format(src_data))

    utils.remove_glob('{}*'.format(src_data))
def yield_xyz(self, entry):
    src_ncei = os.path.basename(entry[1])
    f_utils.Fetch(
        entry[0], callback=self.callback, verbose=self.verbose
    ).fetch_file(src_ncei)
    try:
        src_ds = gdal.Open(src_ncei)
    except Exception as e:
        utils.echo_error_msg(
            'could not read ncei raster file: {}, {}'.format(entry[0], e))
        src_ds = None

    if src_ds is not None:
        _ds = datasets.RasterFile(
            fn=src_ncei,
            data_format=200,
            src_srs='epsg:4326',
            dst_srs=self.dst_srs,
            src_region=self.region,
            verbose=self.verbose
        )
        _ds.src_ds = src_ds
        _ds.ds_open_p = True
        for xyz in _ds.yield_xyz():
            yield(xyz)

    src_ds = None
    utils.remove_glob(src_ncei)
def _init_vector(self):
    self.dst_layer = '{}_sm'.format(self.name)
    self.dst_vector = self.dst_layer + '.shp'
    self.v_fields = [
        'Name', 'Agency', 'Date', 'Type',
        'Resolution', 'HDatum', 'VDatum', 'URL'
    ]
    self.t_fields = [
        ogr.OFTString, ogr.OFTString, ogr.OFTString, ogr.OFTString,
        ogr.OFTString, ogr.OFTString, ogr.OFTString, ogr.OFTString
    ]
    utils.remove_glob('{}.*'.format(self.dst_layer))
    utils.gdal_prj_file('{}.prj'.format(self.dst_layer), self.epsg)
    self.ds = ogr.GetDriverByName('ESRI Shapefile').CreateDataSource(self.dst_vector)
    if self.ds is not None:
        self.layer = self.ds.CreateLayer(
            '{}'.format(self.dst_layer), None, ogr.wkbMultiPolygon)
        [self.layer.CreateField(
            ogr.FieldDefn('{}'.format(f), self.t_fields[i])
        ) for i, f in enumerate(self.v_fields)]
        [self.layer.SetFeature(feature) for feature in self.layer]
    else:
        self.layer = None
def waffles_vdatum(wg, ivert='navd88', overt='mhw', region='4', jar=None):
    """generate a 'conversion-grid' with vdatum.

    output will be the differences (surfaced) between
    `ivert` and `overt` for the region

    Args:
      wg (dict): a waffles config dictionary
      ivert (str): input vertical datum string
      overt (str): output vertical datum string
      region (str): vdatum grid region
      jar (path): path to vdatum .jar file

    Returns:
      list: [{'dem': ['dem-fn', 'raster']}, status]
    """
    vc = vdatumfun._vd_config
    if jar is None:
        vc['jar'] = vdatumfun.vdatum_locate_jar()[0]
    else:
        vc['jar'] = jar

    vc['ivert'] = ivert
    vc['overt'] = overt
    vc['region'] = region

    ## generate an empty grid and dump it to xyz for vdatum
    gdalfun.gdal_null('empty.tif', waffles_proc_region(wg), 0.00083333, nodata=0)
    with open('empty.xyz', 'w') as mt_xyz:
        for xyz in gdalfun.gdal_yield_entry(['empty.tif', 200, 1]):
            xyzfun.xyz_line(xyz, mt_xyz, False)

    vdatumfun.run_vdatum('empty.xyz', vc)
    if os.path.exists('result/empty.xyz') and os.stat('result/empty.xyz').st_size != 0:
        with open('result/empty.xyz') as infile:
            empty_infos = xyzfun.xyz_inf(infile)

        ll = 'd' if empty_infos['minmax'][4] < 0 else '0'
        lu = 'd' if empty_infos['minmax'][5] > 0 else '0'
        wg['data'] = ['result/empty.xyz']
        wg['spat'] = False
        wg['unc'] = False
        wg = waffles_config(**wg)
        vd_out, status = waffles_gmt_surface(
            wg, tension=0, upper_limit=lu, lower_limit=ll)
    else:
        utils.echo_error_msg('failed to generate VDatum grid, check settings')
        vd_out = {}
        status = -1

    utils.remove_glob('empty.*', 'result/*', '.mjr.datalist', 'result')
    return(vd_out, status)
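## A minimal usage sketch for waffles_vdatum(); `wg` is assumed to be a
## waffles config dictionary and the datum choices here are illustrative
## only:
def _vdatum_grid_sketch(wg):
    """generate a navd88->mhw conversion grid from the waffles config `wg`"""
    vd_out, status = waffles_vdatum(wg, ivert='navd88', overt='mhw', region='3')
    if status != 0:
        utils.echo_error_msg('vdatum grid generation failed')

    return(vd_out)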
def vdatum_clean_result(self):
    """clean the vdatum 'result' folder"""
    utils.remove_glob('{}/*'.format(self.result_dir))
    try:
        os.removedirs(self.result_dir)
    except OSError:
        pass
def yield_array(self):
    for this_entry in self.parse_json():
        for arr in this_entry.yield_array():
            yield(arr)

        if this_entry.remote:
            utils.remove_glob('{}*'.format(this_entry.fn))
def yield_xyz(self): """parse the data from the datalist""" for this_entry in self.parse_json(): for xyz in this_entry.yield_xyz(): yield (xyz) if this_entry.remote: utils.remove_glob('{}*'.format(this_entry.fn))
def parse_entry_inf(self, entry, keep_inf=False):
    src_data = os.path.basename(entry[1])
    src_mb = src_data[:-4]
    survey = entry[0].split('/')[7]
    if f_utils.Fetch(
            '{}.inf'.format(entry[0][:-4]), callback=self.callback,
            verbose=False
    ).fetch_file('{}.inf'.format(src_mb)) == 0:
        mb_fmt = self.mb_inf_data_format('{}.inf'.format(src_mb))
        mb_date = self.mb_inf_data_date('{}.inf'.format(src_mb))
        mb_perc = self.mb_inf_perc_good('{}.inf'.format(src_mb))
        if not keep_inf:
            utils.remove_glob('{}.inf'.format(src_mb))

        return(survey, src_data, mb_fmt, mb_perc, mb_date)
def write_datalist(self, data_list, outname=None):
    ## the original signature omitted `self` but referenced
    ## self.metadata; `self` is assumed here
    if outname is None:
        outname = '{}_{}'.format(self.metadata['name'], utils.this_year())

    if os.path.exists('{}.datalist'.format(outname)):
        utils.remove_glob('{}.datalist*'.format(outname))

    with open('{}.datalist'.format(outname), 'w') as tmp_dl:
        [tmp_dl.write('{}\n'.format(x.format_entry())) for x in data_list]

    return('{}.datalist'.format(outname))
def _update_all(self):
    self.FRED._open_ds(1)
    v_zip = os.path.basename(self._hrdem_footprints_url)
    status = f_utils.Fetch(
        self._hrdem_footprints_url, verbose=self.verbose
    ).fetch_ftp_file(v_zip)
    v_shps = utils.p_unzip(v_zip, ['shp', 'shx', 'dbf', 'prj'])
    v_shp = None
    for v in v_shps:
        if '.shp' in v:
            v_shp = v

    try:
        v_ds = ogr.Open(v_shp)
    except Exception:
        v_ds = None
        status = -1

    if v_ds is not None:
        layer = v_ds.GetLayer()
        fcount = layer.GetFeatureCount()
        if self.verbose:
            _prog = utils.CliProgress('scanning {} datasets...'.format(fcount))

        for f in range(0, fcount):
            feature = layer[f]
            name = feature.GetField('Tile_name')
            if self.verbose:
                _prog.update_perc((f, fcount))

            try:
                self.FRED.layer.SetAttributeFilter("Name = '{}'".format(name))
            except Exception:
                pass

            if self.FRED.layer is None or len(self.FRED.layer) == 0:
                data_link = feature.GetField('Ftp_dtm')
                if data_link is not None:
                    geom = feature.GetGeometryRef()
                    self.FRED._add_survey(
                        Name=name,
                        ID=feature.GetField('Project'),
                        Agency='NRCAN',
                        Date=utils.this_year(),
                        MetadataLink=feature.GetField('Meta_dtm'),
                        MetadataDate=utils.this_year(),
                        DataLink=data_link.replace('http', 'ftp'),
                        IndexLink=self._hrdem_footprints_url,
                        DataType='raster',
                        DataSource='hrdem',
                        HorizontalDatum=feature.GetField('Coord_Sys').split(':')[-1],
                        Info=feature.GetField('Provider'),
                        geom=geom
                    )

        if self.verbose:
            _prog.end('scanned {} datasets.'.format(fcount))

    utils.remove_glob(v_zip, *v_shps)
    self.FRED._close_ds()
def parse_(self):
    import zipfile
    exts = [DatasetFactory().data_types[x]['fmts']
            for x in DatasetFactory().data_types.keys()]
    exts = [x for y in exts for x in y]
    datalist = []
    if self.fn.split('.')[-1].lower() == 'zip':
        with zipfile.ZipFile(self.fn) as z:
            zfs = z.namelist()
            for ext in exts:
                for zf in zfs:
                    if ext == zf.split('.')[-1]:
                        datalist.append(os.path.basename(zf))

    for this_data in datalist:
        this_line = utils.p_f_unzip(self.fn, [this_data])[0]
        data_set = DatasetFactory(
            this_line,
            weight=self.weight,
            parent=self,
            src_region=self.region,
            metadata=copy.deepcopy(self.metadata),
            src_srs=self.src_srs,
            dst_srs=self.dst_srs,
            verbose=self.verbose
        ).acquire()
        if data_set is not None and data_set.valid_p(
                fmts=DatasetFactory.data_types[data_set.data_format]['fmts']
        ):
            if self.region is not None and self.region.valid_p(check_xy=True):
                try:
                    inf_region = regions.Region().from_string(
                        data_set.infos['wkt'])
                except Exception:
                    try:
                        inf_region = regions.Region().from_list(
                            data_set.infos['minmax'])
                    except Exception:
                        inf_region = self.region.copy()

                inf_region.wmin = data_set.weight
                inf_region.wmax = data_set.weight
                if regions.regions_intersect_p(inf_region, self.region):
                    for ds in data_set.parse():
                        self.data_entries.append(ds)
                        yield(ds)
            else:
                for ds in data_set.parse():
                    self.data_entries.append(ds)
                    yield(ds)

        utils.remove_glob('{}*'.format(this_data))
def yield_xyz(self, entry):
    src_dc = os.path.basename(entry[1])
    src_ext = src_dc.split('.')[-1].lower()
    if src_ext == 'laz' or src_ext == 'las':
        dt = 'lidar'
    elif src_ext == 'tif' or src_ext == 'img':
        dt = 'raster'
    else:
        dt = None

    if dt == 'lidar':
        if f_utils.Fetch(
                entry[0], callback=self.callback, verbose=self.verbose
        ).fetch_file(src_dc) == 0:
            _ds = datasets.LASFile(
                fn=src_dc,
                data_format=400,
                dst_srs=self.dst_srs,
                src_region=self.region,
                verbose=self.verbose,
                remote=True
            )
            if self.inc is not None:
                ## block the lidar data to `self.inc` with gmt blockmedian
                b_region = regions.regions_reduce(
                    self.region,
                    regions.Region().from_list(_ds.infos['minmax'])
                )
                xyz_func = lambda p: _ds.dump_xyz(dst_port=p, encode=True)
                for xyz in utils.yield_cmd(
                        'gmt blockmedian -I{:.10f} {} -r -V'.format(
                            self.inc, b_region.format('gmt')),
                        verbose=self.verbose,
                        data_fun=xyz_func
                ):
                    yield(xyzfun.XYZPoint().from_list(
                        [float(x) for x in xyz.split()]))
            else:
                for xyz in _ds.yield_xyz():
                    yield(xyz)

            utils.remove_glob('{}*'.format(src_dc))

    elif dt == 'raster':
        if f_utils.Fetch(
                entry[0], callback=self.callback, verbose=self.verbose
        ).fetch_file(src_dc) == 0:
            _ds = datasets.RasterFile(
                fn=src_dc,
                data_format=200,
                dst_srs=self.dst_srs,
                name=src_dc,
                src_region=self.region,
                verbose=self.verbose
            )
            for xyz in _ds.block_xyz(
                    inc=self.inc, want_gmt=True
            ) if self.inc is not None else _ds.yield_xyz():
                yield(xyz)

            utils.remove_glob('{}.*'.format(src_dc))
def _load_coast_mask(self):
    """load the wet/dry mask array"""
    ds = gdal.Open(self.w_mask)
    if ds is not None:
        self.ds_config = demfun.gather_infos(ds)
        this_region = regions.Region().from_geo_transform(
            self.ds_config['geoT'], self.ds_config['nx'], self.ds_config['ny'])
        self.coast_array = ds.GetRasterBand(1).ReadAsArray(
            0, 0, self.ds_config['nx'], self.ds_config['ny'])
        ds = None
    else:
        utils.echo_error_msg('could not open {}'.format(self.w_mask))
        sys.exit()

    utils.remove_glob('{}*'.format(self.w_mask))
def fetch_queue(q, m, p=False):
    """fetch queue `q` of fetch results

    each fetch queue entry should be a list of the following:
    [remote_data_url, local_data_path, fetch-module-name]

    if `p` is True, the fetched data will be processed to xyz
    in the region of the fetch module `m`.
    """
    while True:
        fetch_args = q.get()
        if not m.callback():
            if not os.path.exists(os.path.dirname(fetch_args[1])):
                try:
                    os.makedirs(os.path.dirname(fetch_args[1]))
                except OSError:
                    pass

            if not p:
                if fetch_args[0].split(':')[0] == 'ftp':
                    Fetch(
                        url=fetch_args[0],
                        callback=m.callback,
                        verbose=m.verbose,
                        headers=m.headers
                    ).fetch_ftp_file(fetch_args[1])
                else:
                    Fetch(
                        url=fetch_args[0],
                        callback=m.callback,
                        verbose=m.verbose,
                        headers=m.headers,
                        verify=False if fetch_args[2] == 'srtm' \
                        or fetch_args[2] == 'mar_grav' else True
                    ).fetch_file(fetch_args[1])
            else:
                if m.region is not None:
                    o_x_fn = fetch_args[1] + m.region.format('fn') + '.xyz'
                else:
                    o_x_fn = fetch_args[1] + '.xyz'

                utils.echo_msg('processing local file: {}'.format(o_x_fn))
                if not os.path.exists(o_x_fn):
                    with open(o_x_fn, 'w') as out_xyz:
                        m.dump_xyz(fetch_args, dst_port=out_xyz)

                    try:
                        if os.path.exists(o_x_fn):
                            if os.stat(o_x_fn).st_size == 0:
                                utils.remove_glob(o_x_fn)
                    except OSError:
                        pass

        q.task_done()
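## A minimal sketch of driving fetch_queue() with worker threads; `m` is
## assumed to be a fetch module instance whose `results` entries match the
## layout described in the docstring above, and the thread count is an
## illustrative choice:
import queue
import threading

def _fetch_results_sketch(m, num_threads=3):
    """fetch all of `m.results` concurrently via fetch_queue()"""
    q = queue.Queue()
    for _ in range(num_threads):
        t = threading.Thread(target=fetch_queue, args=(q, m, False))
        t.daemon = True  # workers loop forever; let the process exit anyway
        t.start()

    for entry in m.results:
        q.put(entry)

    q.join()  # block until every queued entry is marked done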
def _write_coast_poly(self):
    """convert the coast_array to a vector"""
    tmp_ds = ogr.GetDriverByName('ESRI Shapefile').CreateDataSource(
        'tmp_c_{}.shp'.format(self.name))
    if tmp_ds is not None:
        tmp_layer = tmp_ds.CreateLayer(
            'tmp_c_{}'.format(self.name), None, ogr.wkbMultiPolygon)
        tmp_layer.CreateField(ogr.FieldDefn('DN', ogr.OFTInteger))
        demfun.polygonize(
            '{}.tif'.format(self.name), tmp_layer, verbose=self.verbose)
        tmp_ds = None

    utils.run_cmd(
        'ogr2ogr -dialect SQLITE -sql "SELECT * FROM tmp_c_{} WHERE DN=0 order by ST_AREA(geometry) desc limit 8" {}.shp tmp_c_{}.shp'.format(
            self.name, self.name, self.name),
        verbose=True
    )
    utils.remove_glob('tmp_c_{}.*'.format(self.name))
    utils.run_cmd(
        'ogrinfo -dialect SQLITE -sql "UPDATE {} SET geometry = ST_MakeValid(geometry)" {}.shp'.format(
            self.name, self.name)
    )
def yield_xyz(self, entry): """ENC data comes as a .000 file in a zip. The data is referenced to MHW and is represente as a depth. In U.S. waters, MHW can be transformed to MSL or the local GEOID using VDatum and/or it's associated grids (mhw.gtx or tss.gtx) """ ## create the tidal transformation grid from mhw to geoid src_zip = os.path.basename(entry[1]) if f_utils.Fetch(entry[0], callback=self.callback, verbose=self.verbose).fetch_file(src_zip) == 0: if entry[2].lower() == 'enc': src_encs = utils.p_unzip(src_zip, ['000']) for src_ch in src_encs: dst_xyz = src_ch.split('.')[0] + '.xyz' try: ds_ogr = ogr.Open(src_ch) layer_s = ds_ogr.GetLayerByName('SOUNDG') if layer_s is not None: with open(dst_xyz, 'w') as o_xyz: for f in layer_s: g = json.loads(f.GetGeometryRef().ExportToJson()) for xyz in g['coordinates']: xyzfun.XYZPoint().from_list([float(x) for x in xyz]).dump(dst_port=o_xyz, encode=False) ds_ogr = layer_s = None except: utils.echo_warning_msg('could not parse {}'.format(src_ch)) _ds = datasets.XYZFile( fn=dst_xyz, data_format=168, z_scale=-1, src_srs='epsg:4326', #src_srs='+proj=longlat +datum=WGS84 +geoidgrids=./{}'.format(vdatum_grid), dst_srs=self.dst_srs, #name=dst_xyz, src_region=self.region, verbose=self.verbose, remote=True ) for xyz in _ds.yield_xyz(): yield(xyz) utils.remove_glob(dst_xyz, o_xyz, *src_encs) utils.remove_glob(src_zip)
def yield_xyz(self, entry): """yield the xyz data from the nasadem fetch module""" if f_utils.Fetch(entry[0], callback=self.callback, verbose=self.verbose, headers=self.headers).fetch_file(entry[1]) == 0: _ds = datasets.RasterFile( fn=entry[1], data_format=200, src_srs='epsg:4326', dst_srs=self.dst_srs, #name=entry[1], src_region=self.region, verbose=self.verbose ) for xyz in _ds.yield_xyz(): if xyz.z != 0: yield(xyz) utils.remove_glob(entry[1])
def yield_xyz(self, entry):
    src_emodnet = 'emodnet_tmp.tif'
    if f_utils.Fetch(
            entry[0], callback=self.callback, verbose=self.verbose
    ).fetch_file(src_emodnet) == 0:
        _ds = datasets.RasterFile(
            fn=src_emodnet,
            data_format=200,
            src_srs='epsg:4326',
            dst_srs=self.dst_srs,
            name=src_emodnet,
            src_region=self.region,
            verbose=self.verbose
        )
        for xyz in _ds.yield_xyz():
            yield(xyz)
    else:
        utils.echo_error_msg(
            'failed to fetch remote file, {}...'.format(src_emodnet))

    utils.remove_glob(src_emodnet)
def run(self):
    for xdl in self.data:
        for x in xdl.data_lists.keys():
            xdl.data_entries = xdl.data_lists[x]
            dl_name = x
            o_v_fields = [
                dl_name, 'Unknown', '0', 'xyz_elevation',
                'Unknown', 'WGS84', 'NAVD88', 'URL'
            ]
            defn = None if self.layer is None else self.layer.GetLayerDefn()
            ## exhaust the mask generator; `self.inc` is assumed here,
            ## the original referenced an undefined `sm_inc`
            [x for x in xdl.mask_xyz('{}.tif'.format(dl_name), self.inc)]
            if demfun.infos('{}.tif'.format(dl_name), scan=True)['zr'][1] == 1:
                tmp_ds = ogr.GetDriverByName('ESRI Shapefile').CreateDataSource(
                    '{}_poly.shp'.format(dl_name))
                if tmp_ds is not None:
                    tmp_layer = tmp_ds.CreateLayer(
                        '{}_poly'.format(dl_name), None, ogr.wkbMultiPolygon)
                    tmp_layer.CreateField(ogr.FieldDefn('DN', ogr.OFTInteger))
                    demfun.polygonize(
                        '{}.tif'.format(dl_name), tmp_layer, verbose=self.verbose)
                    if len(tmp_layer) > 1:
                        if defn is None:
                            defn = tmp_layer.GetLayerDefn()

                        out_feat = gdal_ogr_mask_union(tmp_layer, 'DN', defn)
                        [out_feat.SetField(f, o_v_fields[i])
                         for i, f in enumerate(self.v_fields)]
                        self.layer.CreateFeature(out_feat)

                tmp_ds = None
                utils.remove_glob('{}_poly.*'.format(dl_name), 'tmp.tif')

    self.ds = None
    ## `self.region` is assumed here; the original referenced an
    ## undefined `dr`
    utils.run_cmd(
        'ogrinfo -spat {} -dialect SQLITE -sql "UPDATE {} SET geometry = ST_MakeValid(geometry)" {}'.format(
            self.region.format('ul_lr'), self.dst_layer, self.dst_vector)
    )
def yield_xyz(self, entry): """process stations""" src_data = 'tides_tmp.json' src_csv = 'tides_tmp.csv' ln = 0 if f_utils.Fetch(entry[0], callback=self.callback, verbose=self.verbose).fetch_file(src_data) == 0: with open(src_data, 'r') as json_file: r = json.load(json_file) if len(r) > 0: for feature in r['features']: if self.station_id is not None: if self.station_id != feature['attributes']['id']: continue lon = feature['attributes']['longitude'] lat = feature['attributes']['latitude'] z = feature['attributes'][ self.s_datum] - feature['attributes'][self.t_datum] if self.units == 'm': z = z * 0.3048 xyz = xyzfun.XYZPoint(src_srs='epsg:4326').from_list( [lon, lat, z]) if self.dst_srs is not None: xyz.warp(dst_srs=self.dst_srs) ln += 1 yield (xyz) else: utils.echo_error_msg( 'failed to fetch remote file, {}...'.format(src_data)) if self.verbose: utils.echo_msg('parsed {} data records from {}'.format( ln, src_data)) utils.remove_glob('{}*'.format(src_data))
def _htdp_transform(self, epsg_in, epsg_out):
    """create an htdp transformation grid"""
    htdp = htdpfun.HTDP()
    utils.echo_msg('{}: HTDP: {}->{}'.format(self.src_region, epsg_in, epsg_out))

    ## griddef is (ul_lon, ul_lat, lr_lon, lr_lat, xcount, ycount); the
    ## original used `xmin` for the lower-right longitude, which collapses
    ## the grid to zero width - `xmax` is assumed here
    griddef = (self.src_region.xmin, self.src_region.ymax,
               self.src_region.xmax, self.src_region.ymin,
               self.xcount, self.ycount)
    grid = htdp._new_create_grid(griddef)
    htdp._write_grid(grid, '_tmp_input.xyz')
    htdp._write_control(
        '_tmp_control.txt', '_tmp_output.xyz', '_tmp_input.xyz',
        _htdp_reference_frames[epsg_in]['htdp_id'], 2012.0,
        _htdp_reference_frames[epsg_out]['htdp_id'], 2012.0
    )
    htdp.run('_tmp_control.txt')
    out_grid = htdp._read_grid('_tmp_output.xyz', (griddef[5], griddef[4]))
    utils.remove_glob('_tmp_output.xyz', '_tmp_input.xyz', '_tmp_control.txt')
    return(out_grid, epsg_out)
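## For reference, a sketch of how grid dimensions like `self.xcount` and
## `self.ycount` relate to a region and a cell increment (illustrative
## helper; the original module computes these elsewhere):
def _grid_counts_sketch(region, inc):
    """derive column/row counts for a grid covering `region` at increment `inc`"""
    xcount = int((region.xmax - region.xmin) / inc) + 1
    ycount = int((region.ymax - region.ymin) / inc) + 1
    return(xcount, ycount)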
def yield_xyz(self, entry):
    ## TODO: use globathy.py to get lake depths and return those
    if f_utils.Fetch(
            entry[0], callback=self.callback, verbose=self.verbose,
            headers=self.headers
    ).fetch_file(entry[1]) == 0:
        v_zip = entry[1]
        v_shp = None
        v_shps = utils.p_unzip(v_zip, ['shp', 'shx', 'dbf', 'prj'])
        for v in v_shps:
            if v.split('.')[-1] == 'shp':
                v_shp = v
                break

        if v_shp is not None:
            r_shp = self.extract_region(v_shp)

        utils.remove_glob(v_zip, *v_shps)
def _create_ds(self):
    utils.remove_glob(self.FREDloc)
    self.ds = self.driver.CreateDataSource(self.FREDloc)
    self.layer = self.ds.CreateLayer('FRED', None, ogr.wkbMultiPolygon)
    self.layer.CreateField(ogr.FieldDefn('Name', ogr.OFTString))
    self.layer.CreateField(ogr.FieldDefn('ID', ogr.OFTString))
    self.layer.CreateField(ogr.FieldDefn('Date', ogr.OFTInteger))
    self.layer.CreateField(ogr.FieldDefn('Agency', ogr.OFTString))
    self.layer.CreateField(ogr.FieldDefn('MetadataLink', ogr.OFTString))
    self.layer.CreateField(ogr.FieldDefn('MetadataDate', ogr.OFTString))
    self.layer.CreateField(ogr.FieldDefn('DataLink', ogr.OFTString))
    self.layer.CreateField(ogr.FieldDefn('IndexLink', ogr.OFTString))
    self.layer.CreateField(ogr.FieldDefn('Link', ogr.OFTString))
    self.layer.CreateField(ogr.FieldDefn('DataType', ogr.OFTString))
    self.layer.CreateField(ogr.FieldDefn('DataSource', ogr.OFTString))
    self.layer.CreateField(ogr.FieldDefn('Resolution', ogr.OFTString))
    self.layer.CreateField(ogr.FieldDefn('HorizontalDatum', ogr.OFTString))
    self.layer.CreateField(ogr.FieldDefn('VerticalDatum', ogr.OFTString))
    self.layer.CreateField(ogr.FieldDefn('LastUpdate', ogr.OFTString))
    self.layer.CreateField(ogr.FieldDefn('Etcetra', ogr.OFTString))
    self.layer.CreateField(ogr.FieldDefn('Info', ogr.OFTString))
def _init_datalist_vector(self):
    self.dst_layer = '{}'.format(self.fn)
    self.dst_vector = self.dst_layer + '.json'
    utils.remove_glob('{}.json'.format(self.dst_layer))
    if self.src_srs is not None:
        utils.gdal_prj_file('{}.prj'.format(self.dst_layer), self.src_srs)

    self.ds = ogr.GetDriverByName('GeoJSON').CreateDataSource(self.dst_vector)
    if self.ds is not None:
        self.layer = self.ds.CreateLayer(
            '{}'.format(self.dst_layer), None, ogr.wkbMultiPolygon)
        [self.layer.CreateField(
            ogr.FieldDefn('{}'.format(f), self.t_fields[i])
        ) for i, f in enumerate(self.v_fields)]
        [self.layer.SetFeature(feature) for feature in self.layer]
    else:
        self.layer = None
def _cdn_transform(self, epsg=None, name=None, invert=False):
    """create a cdn transformation grid"""
    if epsg is not None:
        cdn_results = cudem.fetches.vdatum.search_proj_cdn(
            self.src_region, epsg=epsg, cache_dir=self.cache_dir)
    else:
        cdn_results = cudem.fetches.vdatum.search_proj_cdn(
            self.src_region, cache_dir=self.cache_dir)

    ## prefer a known geoid grid if one is in the results
    for _result in cdn_results:
        for g in _geoids:
            if g in _result['name']:
                cdn_results = [_result]

    if len(cdn_results) > 0:
        for _result in cdn_results:
            src_code = int(_result['source_crs_code'].split(':')[-1])
            dst_code = int(_result['target_crs_code'].split(':')[-1])
            if epsg == dst_code or np.any([g in _result['name'] for g in _geoids]):
                if src_code in _htdp_reference_frames.keys():
                    _trans_grid = os.path.join(self.cache_dir, _result['name'])
                    if cudem.fetches.utils.Fetch(
                            _result['url'], verbose=self.verbose
                    ).fetch_file(_trans_grid) == 0:
                        tmp_infos = demfun.infos(_trans_grid)
                        tmp_region = regions.Region().from_geo_transform(
                            tmp_infos['geoT'], tmp_infos['nx'], tmp_infos['ny'])
                        if os.path.exists('_{}'.format(os.path.basename(_trans_grid))):
                            utils.remove_glob('_{}'.format(os.path.basename(_trans_grid)))

                        ## warp the transformation grid to the source region
                        utils.run_cmd(
                            'gdalwarp {} {} -s_srs epsg:4326 -te {} -ts {} {} --config CENTER_LONG 0'.format(
                                _trans_grid,
                                '_{}'.format(os.path.basename(_trans_grid)),
                                self.src_region.format('te'),
                                self.xcount,
                                self.ycount
                            ), verbose=True)
                        _tmp_array, _tmp_infos = demfun.get_array(
                            '_{}'.format(os.path.basename(_trans_grid)))
                        utils.remove_glob('_{}'.format(os.path.basename(_trans_grid)))
                        if invert:
                            _tmp_array = _tmp_array * -1

                        return(_tmp_array, src_code)

    utils.echo_error_msg('failed to locate transformation for {}'.format(epsg))
    return(np.zeros((self.ycount, self.xcount)), epsg)