def update(self):
    """Fetch the HRDEM footprint index and register it in FRED.

    Downloads the footprints zip over FTP, merges the footprint
    regions from the unpacked shapefile into one overall region and
    adds a single 'HRDEM-1' survey entry to the reference vector if
    one is not already present.  Temporary files are removed.
    """
    self.FRED._open_ds()
    zip_fn = os.path.basename(self._hrdem_footprints_url)
    status = f_utils.Fetch(
        self._hrdem_footprints_url, verbose=self.verbose
    ).fetch_ftp_file(zip_fn)
    shp_files = utils.p_unzip(zip_fn, ['shp', 'shx', 'dbf', 'prj'])
    candidates = [fn for fn in shp_files if '.shp' in fn]
    index_shp = candidates[-1] if candidates else None

    ## merge every footprint region into a single overall region
    merged = regions.Region()
    for fp_region in regions.gdal_ogr_regions(index_shp):
        if merged.valid_p(check_xy=True):
            merged = regions.regions_merge(merged, fp_region)
        else:
            merged = fp_region
    geom = merged.export_as_geom()

    self.FRED._attribute_filter(["ID = '{}'".format('HRDEM-1')])
    if self.FRED.layer is None or len(self.FRED.layer) == 0:
        self.FRED._add_survey(
            Name='High-Resolution DEM (Canada)', ID='HRDEM-1',
            Agency='NRCAN', Date=utils.this_year(),
            MetadataLink=self._hrdem_info_url,
            MetadataDate=utils.this_year(),
            DataLink=self._hrdem_footprints_url,
            IndexLink=self._hrdem_footprints_url,
            DataType='raster', DataSource='hrdem',
            Info='Canada Only', geom=geom)

    utils.remove_glob(zip_fn, *shp_files)
    self.FRED._close_ds()
def update(self):
    """Fetch the ArcticDEM tile index and register it in FRED.

    The remote index shapefile is reprojected to WGS84 with ogr2ogr,
    its tile footprints are merged into a single region and a lone
    'ARCTICDEM-1' survey entry is added if not already present.
    """
    self.FRED._open_ds()
    zip_fn = os.path.basename(self._arctic_dem_index_url)
    status = f_utils.Fetch(
        self._arctic_dem_index_url, verbose=self.verbose
    ).fetch_file(zip_fn)
    shp_files = utils.p_unzip(zip_fn, ['shp', 'shx', 'dbf', 'prj'])
    index_shp = None
    for fn in shp_files:
        if '.shp' in fn:
            index_shp = fn

    ## reproject the index to wgs84, then discard the original download
    utils.run_cmd(
        'ogr2ogr arctic_tmp.shp {} -t_srs epsg:4326'.format(index_shp),
        verbose=True)
    utils.remove_glob(zip_fn, *shp_files)
    index_shp = 'arctic_tmp.shp'
    shp_files = ['arctic_tmp.shp', 'arctic_tmp.dbf',
                 'arctic_tmp.shx', 'arctic_tmp.prj']

    ## merge every tile region into a single overall region
    merged = regions.Region()
    for tile_region in regions.gdal_ogr_regions(index_shp):
        if merged.valid_p(check_xy=True):
            merged = regions.regions_merge(merged, tile_region)
        else:
            merged = tile_region
    geom = merged.export_as_geom()

    self.FRED._attribute_filter(["ID = '{}'".format('ARCTICDEM-1')])
    if self.FRED.layer is None or len(self.FRED.layer) == 0:
        self.FRED._add_survey(
            Name='ArcticDEM', ID='ARCTICDEM-1', Agency='UMN',
            Date=utils.this_year(),
            MetadataLink='https://data.pgc.umn.edu/elev/dem/setsm/ArcticDEM/',
            MetadataDate=utils.this_year(),
            DataLink=self._arctic_dem_index_url,
            IndexLink=self._arctic_dem_index_url,
            DataType='raster', DataSource='arcticdem',
            Info='Arctic Only', geom=geom)

    utils.remove_glob(*shp_files)
    self.FRED._close_ds()
def yield_xyz(self, entry):
    """Fetch a NOS survey file and yield its xyz data points.

    entry is a fetch-results triple: [remote-url, local-path, ...].
    The downloaded zip is dispatched on its detected data type:
    'geodas_xyz' zips are parsed as delimited text soundings and
    'grid_bag' zips as rasters.  Extracted files and the download
    itself are removed once their points have been yielded.
    """
    src_nos = os.path.basename(entry[1])
    dt = None
    if f_utils.Fetch(entry[0], callback=self.callback, verbose=self.verbose).fetch_file(src_nos) == 0:
        ## classify the downloaded archive (geodas xyz vs. bag grid)
        dt = self._data_type(src_nos)
        if dt == 'geodas_xyz':
            nos_fns = utils.p_unzip(src_nos, ['xyz', 'dat'])
            for nos_f_r in nos_fns:
                ## skip=1 drops the header row; x comes from column 2,
                ## y from column 1, z from column 3; z_scale=-1 flips
                ## positive-down depths to elevations
                _ds = datasets.XYZFile(
                    fn=nos_f_r, data_format=168, skip=1,
                    xpos=2, ypos=1, zpos=3, z_scale=-1,
                    src_srs='epsg:4326', dst_srs=self.dst_srs,
                    name=nos_f_r, src_region=self.region,
                    verbose=self.verbose, remote=True
                )
                for xyz in _ds.yield_xyz():
                    yield(xyz)
            ## also remove the .inf sidecars generated while parsing
            utils.remove_glob(*nos_fns, *[x+'.inf' for x in nos_fns])
        elif dt == 'grid_bag':
            src_bags = utils.p_unzip(src_nos, exts=['bag'])
            for src_bag in src_bags:
                _ds = datasets.RasterFile(
                    fn=src_bag, data_format=200, dst_srs=self.dst_srs,
                    name=src_bag, src_region=self.region,
                    verbose=self.verbose
                )
                for xyz in _ds.yield_xyz():
                    yield(xyz)
            utils.remove_glob(*src_bags)
    ## remove the downloaded zip regardless of outcome
    utils.remove_glob(src_nos)
def extract_shp(self, in_zip, out_ogr=None):
    """Unzip *in_zip* and run extract_region on the first .shp member.

    Returns the result of extract_region, or None when the archive
    contains no shapefile.
    """
    members = utils.p_unzip(in_zip, ['shp', 'shx', 'dbf', 'prj'])
    shp_fn = next(
        (m for m in members if m.split('.')[-1] == 'shp'), None)
    if shp_fn is None:
        return None
    return self.extract_region(shp_fn, out_ogr)
def _update_all(self):
    """Scan every HRDEM footprint tile into the FRED reference vector.

    Fetches the footprints zip over FTP, opens the unpacked
    shapefile and adds one survey entry per tile that is not
    already present in FRED (matched by tile name).
    """
    ## open FRED for writing (mode 1)
    self.FRED._open_ds(1)
    v_zip = os.path.basename(self._hrdem_footprints_url)
    status = f_utils.Fetch(self._hrdem_footprints_url, verbose=self.verbose).fetch_ftp_file(v_zip)
    v_shps = utils.p_unzip(v_zip, ['shp', 'shx', 'dbf', 'prj'])
    v_shp = None
    for v in v_shps:
        if '.shp' in v:
            v_shp = v
    try:
        v_ds = ogr.Open(v_shp)
    except:
        v_ds = None
        status = -1
    if v_ds is not None:
        layer = v_ds.GetLayer()
        fcount = layer.GetFeatureCount()
        if self.verbose:
            _prog = utils.CliProgress(
                'scanning {} datasets...'.format(fcount))
        for f in range(0, fcount):
            feature = layer[f]
            name = feature.GetField('Tile_name')
            if self.verbose:
                _prog.update_perc((f, fcount))
            ## filter FRED by tile name; failures are ignored so the
            ## subsequent empty-layer check decides whether to add
            try:
                self.FRED.layer.SetAttributeFilter(
                    "Name = '{}'".format(name))
            except:
                pass
            if self.FRED.layer is None or len(self.FRED.layer) == 0:
                data_link = feature.GetField('Ftp_dtm')
                if data_link is not None:
                    geom = feature.GetGeometryRef()
                    ## NOTE(review): 'http' -> 'ftp' rewrite assumes the
                    ## footprint stores an http mirror of the ftp path
                    self.FRED._add_survey(
                        Name=name, ID=feature.GetField('Project'),
                        Agency='NRCAN', Date=utils.this_year(),
                        MetadataLink=feature.GetField('Meta_dtm'),
                        MetadataDate=utils.this_year(),
                        DataLink=data_link.replace('http', 'ftp'),
                        IndexLink=self._hrdem_footprints_url,
                        DataType='raster', DataSource='hrdem',
                        HorizontalDatum=feature.GetField(
                            'Coord_Sys').split(':')[-1],
                        Info=feature.GetField('Provider'), geom=geom)
        if self.verbose:
            _prog.end('scanned {} datasets.'.format(fcount))
    utils.remove_glob(v_zip, *v_shps)
    self.FRED._close_ds()
def yield_xyz(self, entry):
    """ENC data comes as a .000 file in a zip.

    The data is referenced to MHW and is represented as a depth.
    In U.S. waters, MHW can be transformed to MSL or the local GEOID
    using VDatum and/or its associated grids (mhw.gtx or tss.gtx).

    Each chart's SOUNDG layer is dumped to a temporary .xyz file,
    yielded through a datasets.XYZFile (depths negated to
    elevations), then removed.
    """
    src_zip = os.path.basename(entry[1])
    if f_utils.Fetch(entry[0], callback=self.callback, verbose=self.verbose).fetch_file(src_zip) == 0:
        if entry[2].lower() == 'enc':
            src_encs = utils.p_unzip(src_zip, ['000'])
            for src_ch in src_encs:
                dst_xyz = src_ch.split('.')[0] + '.xyz'
                try:
                    ds_ogr = ogr.Open(src_ch)
                    layer_s = ds_ogr.GetLayerByName('SOUNDG')
                    if layer_s is not None:
                        with open(dst_xyz, 'w') as o_xyz:
                            for f in layer_s:
                                g = json.loads(f.GetGeometryRef().ExportToJson())
                                for xyz in g['coordinates']:
                                    xyzfun.XYZPoint().from_list(
                                        [float(x) for x in xyz]
                                    ).dump(dst_port=o_xyz, encode=False)
                    ds_ogr = layer_s = None
                except Exception:
                    utils.echo_warning_msg('could not parse {}'.format(src_ch))
                _ds = datasets.XYZFile(
                    fn=dst_xyz, data_format=168, z_scale=-1,
                    src_srs='epsg:4326',
                    dst_srs=self.dst_srs,
                    src_region=self.region, verbose=self.verbose,
                    remote=True
                )
                for xyz in _ds.yield_xyz():
                    yield(xyz)
                ## BUGFIX: the old cleanup passed the (closed) file object
                ## o_xyz to remove_glob -- wrong type, and a NameError when
                ## SOUNDG was missing or parsing failed; it also only
                ## removed the last chart's xyz dump.  Remove each dump
                ## here instead, by filename.
                utils.remove_glob(dst_xyz)
            utils.remove_glob(*src_encs)
    utils.remove_glob(src_zip)
def yield_xyz(self, entry):
    """Fetch a lakes vector zip and extract its region shapefile.

    NOTE(review): no xyz points are yielded yet -- lake depths (e.g.
    via globathy) are a TODO; currently this only extracts the
    region shapefile and cleans up.
    """
    fetched = f_utils.Fetch(
        entry[0], callback=self.callback, verbose=self.verbose,
        headers=self.headers
    ).fetch_file(entry[1])
    if fetched == 0:
        lakes_zip = entry[1]
        members = utils.p_unzip(lakes_zip, ['shp', 'shx', 'dbf', 'prj'])
        lakes_shp = None
        for member in members:
            if member.split('.')[-1] == 'shp':
                lakes_shp = member
                break
        if lakes_shp is not None:
            r_shp = self.extract_region(lakes_shp)
        utils.remove_glob(lakes_zip, *members)
def yield_xyz(self, entry):
    """Yield the xyz data from the copernicus fetch module."""
    fetched = f_utils.Fetch(
        entry[0], callback=self.callback, verbose=self.verbose,
        headers=self.headers
    ).fetch_file(entry[1])
    if fetched == 0:
        for dem_tif in utils.p_unzip(entry[1], ['tif']):
            tile_ds = datasets.RasterFile(
                fn=dem_tif, data_format=200, src_srs='epsg:4326',
                dst_srs=self.dst_srs, src_region=self.region,
                verbose=self.verbose)
            ## zero values are skipped (treated as no-data here)
            for pt in tile_ds.yield_xyz():
                if pt.z != 0:
                    yield(pt)
            utils.remove_glob(dem_tif, dem_tif + '.inf')
    utils.remove_glob(entry[1])
def run(self):
    """Search the HRDEM footprints for tiles intersecting the region.

    For each matching FRED survey, fetch its footprint index
    shapefile and append [data-url, local-path, datatype] to
    self.results for every tile whose geometry intersects
    self.region.
    """
    for surv in FRED._filter_FRED(self):
        ## BUGFIX: v_zip was never assigned before use (NameError);
        ## derive it from the survey's IndexLink
        v_zip = os.path.basename(surv['IndexLink'])
        status = f_utils.Fetch(surv['IndexLink']).fetch_ftp_file(
            v_zip, verbose=self.verbose)
        v_shps = utils.p_unzip(v_zip, ['shp', 'shx', 'dbf', 'prj'])
        v_shp = None
        for v in v_shps:
            if v.split('.')[-1] == 'shp':
                v_shp = v
                break
        try:
            v_ds = ogr.Open(v_shp)
        except Exception:
            v_ds = None
            status = -1
        if v_ds is not None:
            layer = v_ds.GetLayer()
            fcount = layer.GetFeatureCount()
            for f in range(0, fcount):
                feature = layer[f]
                ## BUGFIX: the old code tested data_link before it was
                ## ever assigned (NameError on the first feature) and
                ## filtered FRED on an undefined `name`.  Check the
                ## geometry first, then read the data link.
                geom = feature.GetGeometryRef()
                if geom.Intersects(self.region.export_as_geom()):
                    data_link = feature.GetField('Ftp_dtm')
                    if data_link is not None:
                        data_link = data_link.replace('http', 'ftp')
                        self.results.append([
                            data_link,
                            os.path.join(
                                self._outdir, data_link.split('/')[-1]),
                            surv['DataType']
                        ])
            v_ds = None
        utils.remove_glob(v_zip, *v_shps)
def update(self):
    """Update FRED with the HydroLAKES polygon dataset.

    Downloads the polygon zip, derives an overall region from the
    unpacked shapefile and adds a single 'HYDROLAKES' survey entry
    to the reference vector if one is not already present.
    """
    self.FRED._open_ds()
    v_shp = None
    v_zip = os.path.basename(self._hydrolakes_poly_zip)
    status = f_utils.Fetch(
        self._hydrolakes_poly_zip, verbose=self.verbose
    ).fetch_file(v_zip)
    v_shps = utils.p_unzip(v_zip, ['shp', 'shx', 'dbf', 'prj'])
    for v in v_shps:
        if '.shp' in v:
            v_shp = v

    ## merge every lake region into a single overall region
    shp_regions = regions.gdal_ogr_regions(v_shp)
    shp_region = regions.Region()
    for this_region in shp_regions:
        if shp_region.valid_p(check_xy=True):
            shp_region = regions.regions_merge(shp_region, this_region)
        else:
            shp_region = this_region
    geom = shp_region.export_as_geom()

    self.FRED._attribute_filter(["ID = '{}'".format('HYDROLAKES')])
    if self.FRED.layer is None or len(self.FRED.layer) == 0:
        ## BUGFIX: survey Name was misspelled 'HydorLakes'
        self.FRED._add_survey(
            Name='HydroLakes', ID='HYDROLAKES', Agency='HydroSheds',
            Date=utils.this_year(),
            MetadataLink=self._hydrolakes_prods,
            MetadataDate=utils.this_year(),
            DataLink=self._hydrolakes_poly_zip,
            IndexLink=self._hydrolakes_poly_zip,
            DataType='vector', DataSource='hydrolakes',
            Info='World-wide lakes', geom=geom)
    utils.remove_glob(v_zip, *v_shps)
    self.FRED._close_ds()
def run(self):
    """Search the ArcticDEM index for tiles intersecting the region.

    For each matching FRED survey, fetch the tile index, reproject
    it to WGS84 and append [fileurl, local-path, datatype] to
    self.results for every tile intersecting self.region.
    """
    ## removed stray debug print(self.arctic_region) and a dead
    ## SetAttributeFilter block that referenced an undefined `name`
    for surv in FRED._filter_FRED(self):
        v_zip = os.path.basename(self._arctic_dem_index_url)
        status = f_utils.Fetch(
            self._arctic_dem_index_url, verbose=self.verbose
        ).fetch_file(v_zip)
        v_shps = utils.p_unzip(v_zip, ['shp', 'shx', 'dbf', 'prj'])
        v_shp = None
        for v in v_shps:
            if v.split('.')[-1] == 'shp':
                v_shp = v
                break
        ## reproject the index to wgs84, then discard the download
        utils.run_cmd(
            'ogr2ogr arctic_tmp.shp {} -t_srs epsg:4326'.format(v_shp),
            verbose=True)
        utils.remove_glob(v_zip, *v_shps)
        v_shp = 'arctic_tmp.shp'
        v_shps = ['arctic_tmp.shp', 'arctic_tmp.dbf',
                  'arctic_tmp.shx', 'arctic_tmp.prj']
        try:
            v_ds = ogr.Open(v_shp)
        except Exception:
            v_ds = None
            status = -1
        if v_ds is not None:
            layer = v_ds.GetLayer()
            fcount = layer.GetFeatureCount()
            for f in range(0, fcount):
                feature = layer[f]
                geom = feature.GetGeometryRef()
                if geom.Intersects(self.region.export_as_geom()):
                    data_link = feature.GetField('fileurl')
                    self.results.append([
                        data_link,
                        os.path.join(
                            self._outdir, data_link.split('/')[-1]),
                        surv['DataType']
                    ])
            v_ds = None
        utils.remove_glob(*v_shps)
    return(self)
def run(self):
    """Search FRED surveys and collect tile URLs intersecting the region."""
    if self.datatype is not None:
        self.where.append("DataType LIKE '%{}%'".format(self.datatype))
    for surv in FRED._filter_FRED(self):
        if self.callback():
            break
        index_zip = os.path.basename(surv['IndexLink'])
        fetched = f_utils.Fetch(
            surv['IndexLink'], callback=self.callback,
            verbose=self.verbose
        ).fetch_file(index_zip)
        if fetched != 0:
            continue
        members = utils.p_unzip(index_zip, ['shp', 'shx', 'dbf', 'prj'])
        index_shp = None
        for member in members:
            if member.split('.')[-1] == 'shp':
                index_shp = member
        src_ds = ogr.Open(index_shp)
        src_layer = src_ds.GetLayer(0)
        for feat in src_layer:
            if feat.GetGeometryRef().Intersects(self.region.export_as_geom()):
                tile_url = feat.GetField('URL').strip()
                self.results.append([
                    tile_url,
                    os.path.join(
                        self._outdir,
                        '{}/{}'.format(
                            surv['ID'], tile_url.split('/')[-1])),
                    surv['DataType']
                ])
        src_ds = src_layer = None
        utils.remove_glob(index_zip, *members)
def yield_xyz(self, entry):
    """Yield the xyz data from the tnm fetch module."""
    if f_utils.Fetch(
            entry[0], callback=self.callback, verbose=self.verbose
    ).fetch_file(entry[1]) == 0:
        ## only raster entries are parsed here
        if entry[-1] == 'raster':
            grid_fns = utils.p_unzip(
                entry[1], ['tif', 'img', 'gdal', 'asc', 'bag'])
            for grid_fn in grid_fns:
                grid_ds = datasets.RasterFile(
                    fn=grid_fn, data_format=200, src_srs='epsg:4326',
                    dst_srs=self.dst_srs, src_region=self.region,
                    verbose=self.verbose)
                ## zero values are skipped (treated as no-data here)
                for pt in grid_ds.yield_xyz():
                    if pt.z != 0:
                        yield(pt)
                utils.remove_glob(grid_fn)
    utils.remove_glob(entry[1])
def run(self):
    '''Run the DAV fetching module'''
    if self.region is None:
        return ([])
    ## query the DAV ArcGIS endpoint for datasets covering the region
    _data = {
        'where': self.where,
        'outFields': '*',
        'geometry': self.region.format('bbox'),
        'inSR': 4326,
        'outSR': 4326,
        'f': 'pjson',
        'returnGeometry': 'False',
    }
    _req = f_utils.Fetch(
        self._dav_api_url, verbose=self.verbose).fetch_req(params=_data)
    if _req is not None:
        features = _req.json()
        for feature in features['features']:
            links = json.loads(
                feature['attributes']['ExternalProviderLink'])
            if self.index:
                ## index mode: just print the dataset attributes
                feature['attributes']['ExternalProviderLink'] = links
                print(json.dumps(feature['attributes'], indent=4))
            else:
                for link in links['links']:
                    ## serviceID 46 is the bulk-download link type used here
                    if link['serviceID'] == 46:
                        urllist = 'urllist' + str(
                            feature['attributes']['ID']) + '.txt'
                        surv_name = '_'.join(
                            link['link'].split('/')[-1].split('_')[:-1])
                        index_zipfile = 'tileindex_{}.zip'.format(
                            surv_name)
                        index_zipurl = link['link'] + '/' + index_zipfile
                        urllist_url = link['link'] + '/' + urllist
                        ## try the survey subdirectory first, then fall
                        ## back to the parent directory; stop after the
                        ## fallback has been attempted
                        while True:
                            if f_utils.Fetch(
                                    urllist_url,
                                    verbose=True).fetch_file(urllist) != 0:
                                if urllist_url == '/'.join(
                                        link['link'].split(
                                            '/')[:-1]) + '/' + urllist:
                                    break
                                urllist_url = '/'.join(link['link'].split(
                                    '/')[:-1]) + '/' + urllist
                            else:
                                break
                        ## NOTE(review): if both fetches failed, urllist
                        ## may not exist on disk -- this open would raise
                        with open(urllist, 'r') as ul:
                            for line in ul:
                                if 'tileindex' in line:
                                    index_zipurl = line.strip()
                                    break
                        utils.remove_glob(urllist)
                        if f_utils.Fetch(index_zipurl,
                                         callback=self.callback,
                                         verbose=self.verbose).fetch_file(
                                             index_zipfile) == 0:
                            index_shps = utils.p_unzip(
                                index_zipfile, ['shp', 'shx', 'dbf', 'prj'])
                            index_shp = None
                            for v in index_shps:
                                if v.split('.')[-1] == 'shp':
                                    index_shp = v
                            index_ds = ogr.Open(index_shp)
                            index_layer = index_ds.GetLayer(0)
                            for index_feature in index_layer:
                                index_geom = index_feature.GetGeometryRef()
                                if index_geom.Intersects(
                                        self.region.export_as_geom()):
                                    tile_url = index_feature.GetField(
                                        'URL').strip()
                                    self.results.append([
                                        tile_url,
                                        os.path.join(
                                            self._outdir, '{}/{}'.format(
                                                feature['attributes']
                                                ['ID'],
                                                tile_url.split('/')[-1])),
                                        feature['attributes']['DataType']
                                    ])
                            index_ds = index_layer = None
                            utils.remove_glob(index_zipfile, *index_shps)
    return (self)
def update(self):
    """Update or create the reference vector file.

    For each vdatum grid set, fetch its zip, parse the .inf region
    metadata and add any missing grids to FRED as surveys.
    """
    self.FRED._open_ds(1)
    for vd in self._vdatums:
        surveys = []
        if vd == 'TIDAL' or vd == 'IGLD85':
            ## ==============================================
            ## All tidal inf data is in each one, so we only
            ## have to download one of the tidal zips to process
            ## them all; lets use the smallest one
            ## Keep this link up-to-date!
            ## ==============================================
            if vd == 'TIDAL':
                vd_ = 'DEVAemb12_8301'
            else:
                vd_ = vd
            vd_zip_url = '{}{}.zip'.format(self._vdatum_data_url, vd_)
            v_inf = 'tidal_area.inf'
        elif vd == 'VERTCON':
            vd_zip_url = '{}vdatum_{}.zip'.format(self._vdatum_data_url, vd)
            v_inf = 'vcn.inf'
        else:
            vd_zip_url = '{}vdatum_{}.zip'.format(self._vdatum_data_url, vd)
            v_inf = '{}.inf'.format(vd.lower())

        if f_utils.Fetch(vd_zip_url, verbose=True).fetch_file(
                '{}.zip'.format(vd)) == 0:
            v_infs = utils.p_unzip('{}.zip'.format(vd), ['inf'])
            ## BUGFIX: proc_vdatum_inf was called twice with identical
            ## arguments (the first result was discarded); call it once
            v_dict = proc_vdatum_inf(
                v_inf, name=vd if vd != 'TIDAL' else None)
            for key in v_dict.keys():
                v_dict[key]['vdatum'] = vd
                v_dict[key]['remote'] = vd_zip_url

            ## expand the single tidal region entry into one entry
            ## per tidal datum grid (mhw.gtx, tss.gtx, ...)
            if vd == 'TIDAL':
                v_dict_ = {}
                for tidal_key in v_dict.keys():
                    for t in self._tidal_datums:
                        key_ = '{}_{}'.format(t, tidal_key)
                        v_dict_[key_] = {}
                        v_dict_[key_]['region'] = v_dict[tidal_key][
                            'region']
                        v_dict_[key_]['vdatum'] = t
                        v_dict_[key_]['grid'] = '{}.gtx'.format(t)
                        v_dict_[key_]['remote'] = '{}{}.zip'.format(
                            self._vdatum_data_url, tidal_key)
                v_dict = v_dict_
            ## (removed stray debug print(v_dict))

            for key in v_dict.keys():
                self.FRED._attribute_filter(["ID = '{}'".format(key)])
                if self.FRED.layer is None or len(self.FRED.layer) == 0:
                    geom = regions.Region().from_list(
                        v_dict[key]['region']).export_as_geom()
                    if geom is not None:
                        surveys.append({
                            'Name': v_dict[key]['grid'],
                            'ID': key,
                            'Agency': 'NOAA',
                            'Date': utils.this_date(),
                            'MetadataLink': "",
                            'MetadataDate': utils.this_date(),
                            'DataLink': v_dict[key]['remote'],
                            'Link': self._vdatum_data_url,
                            'DataType': v_dict[key]['vdatum'],
                            'DataSource': 'vdatum',
                            'HorizontalDatum': 4326,
                            'VerticalDatum': v_dict[key]['vdatum'],
                            'Info': "",
                            'geom': geom
                        })
            ## BUGFIX: keep add/cleanup inside the fetch-success branch
            ## so v_infs cannot be referenced unbound when the fetch fails
            self.FRED._add_surveys(surveys)
            utils.remove_glob(*v_infs, '{}.zip'.format(vd))
    self.FRED._close_ds()