def run(self):
    """Run the NASADEM DEM fetching module.

    Appends [url, local-path, datatype] entries to self.results for every
    data link of every FRED survey matching the current filter.
    """
    for survey in FRED._filter_FRED(self):
        for link in survey['DataLink'].split(','):
            # strip any query string from the remote filename
            local_name = link.split('/')[-1].split('?')[0]
            self.results.append(
                [link, os.path.join(self._outdir, local_name), survey['DataType']]
            )
    return(self)
def __init__(self, where=None, formats=None, extents=None, q=None, **kwargs):
    """Initialize The National Map (TNM) fetch module.

    Args:
      where: optional list of SQL-style filter clauses for FRED
        (default None -> empty list; was a mutable default `[]`, which is
        shared across calls — fixed)
      formats: comma-separated product formats to request
      extents: comma-separated extents to match against products
      q: free-text query string passed through to the TNM API
    """
    super().__init__(**kwargs)
    self._tnm_api_url = 'http://tnmaccess.nationalmap.gov/api/v1'
    self._tnm_dataset_url = 'https://tnmaccess.nationalmap.gov/api/v1/datasets?'
    self._tnm_product_url = 'https://tnmaccess.nationalmap.gov/api/v1/products?'
    self._tnm_meta_base = 'https://www.sciencebase.gov/catalog/item/'
    # TNM dataset names carrying elevation and related data
    self._elev_ds = [
        'National Elevation Dataset (NED) 1 arc-second',
        'Digital Elevation Model (DEM) 1 meter',
        'National Elevation Dataset (NED) 1/3 arc-second',
        'National Elevation Dataset (NED) 1/9 arc-second',
        'National Elevation Dataset (NED) Alaska 2 arc-second',
        'Alaska IFSAR 5 meter DEM',
        'Original Product Resolution (OPR) Digital Elevation Model (DEM)',
        'Ifsar Digital Surface Model (DSM)',
        'Ifsar Orthorectified Radar Image (ORI)',
        'Lidar Point Cloud (LPC)',
        'National Hydrography Dataset Plus High Resolution (NHDPlus HR)',
        'National Hydrography Dataset (NHD) Best Resolution',
        'National Watershed Boundary Dataset (WBD)',
        'USDA National Agriculture Imagery Program (NAIP)',
        'Topobathymetric Lidar DEM',
        'Topobathymetric Lidar Point Cloud'
    ]
    self._outdir = os.path.join(os.getcwd(), 'tnm')
    # avoid the shared mutable-default-argument pitfall
    self.where = [] if where is None else where
    self.name = 'tnm'
    self._urls = [self._tnm_api_url]
    self.FRED = FRED.FRED(name=self.name, verbose=self.verbose)
    self.update_if_not_in_FRED()
    self.formats = formats
    self.extents = extents
    self.q = q
def __init__(self, where=None, datatype=None, gtx=False, epsg=None, **kwargs):
    """Initialize the VDATUM vertical-datum grid fetch module.

    Args:
      where: optional list of FRED filter clauses (default None -> empty
        list; was a mutable default `[]`, shared across calls — fixed)
      datatype: restrict results to a vertical-datum type
      gtx: fetch gtx-format grids
      epsg: target epsg code (coerced to int via utils.int_or)
    """
    super().__init__(**kwargs)
    self._vdatum_data_url = 'https://vdatum.noaa.gov/download/data/'
    self._proj_vdatum_index = 'https://cdn.proj.org/files.geojson'
    self._outdir = os.path.join(os.getcwd(), 'vdatum')
    ## add others IGLD85
    #self._vdatums = ['VERTCON', 'EGM1984', 'EGM1996', 'EGM2008', 'GEOID03', 'GEOID06', 'GEOID09', 'GEOID12A', 'GEOID12B', 'GEOID96', 'GEOID99', 'TIDAL']
    self._vdatums = ['TIDAL']
    self._tidal_datums = ['mhw', 'mhhw', 'mlw', 'mllw', 'tss', 'mtl']
    # avoid the shared mutable-default-argument pitfall
    self.where = [] if where is None else where
    self.datatype = datatype
    self.epsg = utils.int_or(epsg)
    self.gtx = gtx
    self.name = 'vdatum'
    self.v_datum = 'varies'
    self.FRED = FRED.FRED(name=self.name, verbose=self.verbose)
    self.update_if_not_in_FRED()
def __init__(self, where='', datatype=None, update=False, **kwargs):
    """Set up the NOS hydrographic survey fetch module.

    Args:
      where: optional single FRED filter clause ('' for none)
      datatype: restrict results to a data type (adds a LIKE clause)
      update: force a full FRED update instead of the on-demand check
    """
    super().__init__(**kwargs)
    self._nos_url = 'https://www.ngdc.noaa.gov/mgg/bathymetry/hydro.html'
    # per-directory WAF xml listing urls
    self._nos_xml_url = lambda nos_dir: 'https://data.noaa.gov/waf/NOAA/NESDIS/NGDC/MGG/NOS/%siso_u/xml/' %(nos_dir)
    self._nos_iso_xml_url = lambda nos_dir: 'https://data.noaa.gov/waf/NOAA/NESDIS/NGDC/MGG/NOS/%siso/xml/' %(nos_dir)
    self._nos_directories = [
        "B00001-B02000/",
        "D00001-D02000/",
        "F00001-F02000/",
        "H00001-H02000/",
        "H02001-H04000/",
        "H04001-H06000/",
        "H06001-H08000/",
        "H08001-H10000/",
        "H10001-H12000/",
        "H12001-H14000/",
        "L00001-L02000/",
        "L02001-L04000/",
        "T00001-T02000/",
        "W00001-W02000/",
    ]
    self._outdir = os.path.join(os.getcwd(), 'nos')
    self._nos_fmts = ['.xyz.gz', '.bag.gz', '.bag']
    self.where = [where] if where else []
    self.datatype = datatype
    if self.datatype is not None:
        self.where.append("DataType LIKE '%{}%'".format(self.datatype.upper()))
    self.name = 'nos'
    self._urls = [self._nos_url]
    self.FRED = FRED.FRED(name=self.name, verbose=self.verbose)
    self.want_update = update
    if self.want_update:
        self.update()
    else:
        self.update_if_not_in_FRED()
def run(self):
    """Search for data in the reference vector file."""
    if self.datatype is not None:
        # narrow the FRED filter to the requested data type
        self.where.append("DataType = '{}'".format(self.datatype))
    for survey in FRED._filter_FRED(self):
        for link in survey['DataLink'].split(','):
            dst_fn = os.path.join(self._outdir, link.split('/')[-1])
            self.results.append([link, dst_fn, survey['DataType']])
def _parse_prods_results(self, r, f=None, e=None, q=None):
    """Append FRED-filtered survey data links to the results list.

    NOTE(review): parameters r/f/e/q are not used in this body; kept for
    interface compatibility with existing callers.
    """
    for survey in FRED._filter_FRED(self):
        # skip empty entries produced by trailing commas
        links = [l for l in survey['DataLink'].split(',') if l != '']
        for link in links:
            dst_fn = os.path.join(self._outdir, link.split('/')[-1])
            self.results.append([link, dst_fn, survey['DataType']])
def __init__(self, where='', **kwargs):
    """Set up the Canadian High Resolution DEM (HRDEM) fetch module.

    Args:
      where: optional single FRED filter clause ('' for none)
    """
    super().__init__(**kwargs)
    self._hrdem_footprints_url = 'ftp://ftp.maps.canada.ca/pub/elevation/dem_mne/highresolution_hauteresolution/Datasets_Footprints.zip'
    self._hrdem_info_url = 'https://open.canada.ca/data/en/dataset/957782bf-847c-4644-a757-e383c0057995#wb-auto-6'
    self._outdir = os.path.join(os.getcwd(), 'hrdem')
    self.where = [where] if where else []
    self.name = 'hrdem'
    self.FRED = FRED.FRED(name=self.name, verbose=self.verbose)
    self.update_if_not_in_FRED()
def __init__(self, where='1=1', layer=0, **kwargs):
    """Set up the ArcticDEM fetch module.

    Args:
      where: FRED filter clause (default '1=1' matches everything)
      layer: index layer to query (currently unused here)
    """
    super().__init__(**kwargs)
    self._arctic_dem_index_url = 'https://data.pgc.umn.edu/elev/dem/setsm/ArcticDEM/indexes/ArcticDEM_Tile_Index_Rel7.zip'
    self._outdir = os.path.join(os.getcwd(), 'arcticdem')
    self.name = 'arcticdem'
    self.where = [where] if where else []
    # keep a private copy of the request region
    self.arctic_region = self.region.copy()
    #self.arctic_region.warp('epsg:3413')
    self.FRED = FRED.FRED(name=self.name, verbose = self.verbose)
    self.update_if_not_in_FRED()
def __init__(self, where='', **kwargs):
    """Set up the NCEI THREDDS DEM catalog fetch module.

    Args:
      where: optional single FRED filter clause ('' for none).
        (The default was a mutable `[]` even though the body treats
        `where` as a string — `len(where)` / `[where]`; an empty string
        default has identical behavior without the mutable-default
        pitfall.)
    """
    super().__init__(**kwargs)
    self._nt_catalog = "https://www.ngdc.noaa.gov/thredds/demCatalog.xml"
    self._ngdc_url = "https://www.ngdc.noaa.gov"
    self._outdir = os.path.join(os.getcwd(), 'ncei_thredds')
    self.where = [where] if len(where) > 0 else []
    self.name = 'ncei_thredds'
    self._urls = [self._nt_catalog, self._ngdc_url]
    self.FRED = FRED.FRED(name=self.name, verbose=self.verbose)
    self.update_if_not_in_FRED()
def run(self):
    """Search the NOS reference vector and append the results to the
    results list."""
    for survey in FRED._filter_FRED(self):
        for link in survey['DataLink'].split(','):
            if link == '':
                continue
            dt = self._data_type(link)
            # when a datatype filter is set, keep only matching links
            if self.datatype is not None and self.datatype.lower() not in dt:
                continue
            dst_fn = os.path.join(self._outdir, link.split('/')[-1])
            self.results.append([link, dst_fn, survey['DataType']])
def run(self):
    """Query the EMODNET WCS for coverages intersecting the request region
    and append a GetCoverage url per matching survey."""
    emod_wcs = f_utils.WCS(self._emodnet_grid_url)
    for survey in FRED._filter_FRED(self):
        coverage = emod_wcs._describe_coverage(survey['ID'])
        if coverage is None:
            continue
        coverage_region = emod_wcs._get_coverage_region(coverage)
        if not regions_intersect_ogr_p(self.region, coverage_region):
            continue
        emod_url = emod_wcs._get_coverage_url(survey['ID'], region=self.region)
        outf = 'emodnet_{}.tif'.format(self.region.format('fn'))
        self.results.append(
            [emod_url, os.path.join(self._outdir, outf), survey['DataType']]
        )
def __init__(self, where='', datatype=None, inc=None, **kwargs):
    """Set up the NOAA Digital Coast fetch module.

    Args:
      where: optional single FRED filter clause ('' for none)
      datatype: restrict results to a data type
      inc: target increment (parsed via utils.str2inc)
    """
    super().__init__(**kwargs)
    self._dc_url = 'https://coast.noaa.gov'
    self._dc_htdata_url = 'https://coast.noaa.gov/htdata/'
    #self._dc_dirs = ['lidar1_z', 'lidar2_z', 'lidar3_z', 'lidar4_z', 'raster1', 'raster2', 'raster5']
    self._dc_dirs = ['lidar1_z', 'raster1']
    self._outdir = os.path.join(os.getcwd(), 'digital_coast')
    self.where = [where] if where else []
    self.datatype = datatype
    self.inc = utils.str2inc(inc)
    self.name = 'dc'
    self._urls = [self._dc_url, self._dc_htdata_url]
    self.FRED = FRED.FRED(name=self.name, verbose=self.verbose)
    # NOTE(review): sibling modules call `update_if_not_in_FRED` (no leading
    # underscore) — confirm `_update_if_not_in_FRED` exists for this class.
    self._update_if_not_in_FRED()
def run(self):
    """Search for data in the reference vector file."""
    for survey in FRED._filter_FRED(self):
        #wcs_url = "{}?request=GetCoverage&version=1.0.0&service=WCS&coverage={}&bbox={}&format=NetCDF3"\
        #    .format(surv['IndexLink'], surv['Etcetra'], self.region.format('bbox'))
        #print(wcs_url)
        #self.results.append([wcs_url, surv['DataLink'].split(',')[0].split('/')[-1], surv['DataType']])
        links = [l for l in survey['DataLink'].split(',') if l != '']
        for link in links:
            dst_fn = os.path.join(self._outdir, link.split('/')[-1])
            self.results.append([link, dst_fn, survey['DataType']])
def __init__(self, where='', datatype=None, **kwargs):
    """Set up the NASADEM fetch module (OpenTopography mirror).

    Args:
      where: optional single FRED filter clause ('' for none)
      datatype: restrict results to a data type
    """
    super().__init__(**kwargs)
    self.nasadem_rurl = 'https://opentopography.s3.sdsc.edu/minio/raster/NASADEM/NASADEM_be/'
    self.nasadem_url = 'https://opentopography.s3.sdsc.edu/minio/download/raster/NASADEM/NASADEM_be/'
    self.nasadem_vrt_url = 'https://opentopography.s3.sdsc.edu/minio/download/raster/NASADEM/NASADEM_be.vrt?token='
    self.where = [where] if where else []
    self.datatype = datatype
    self._outdir = os.path.join(os.getcwd(), 'nasadem')
    self.name = 'nasadem'
    self.FRED = FRED.FRED(name=self.name, verbose=self.verbose)
    # browser-like headers; the mirror rejects the default urllib agent
    self.headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:89.0) Gecko/20100101 Firefox/89.0',
        'referer': 'https://opentopography.s3.sdsc.edu/minio/raster/NASADEM/NASADEM_be/'
    }
    self.update_if_not_in_FRED()
def run(self):
    """Query the CHS WCS for coverages intersecting the request region and
    append a GetCoverage url per matching survey.

    Results are [url, local-path, datatype]; the local path is joined with
    self._outdir for consistency with the other fetch modules (the original
    appended only the bare filename here).
    """
    chs_wcs = f_utils.WCS(self._chs_url)
    for surv in FRED._filter_FRED(self):
        d = chs_wcs._describe_coverage(surv['ID'])
        if d is not None:
            ds_region = chs_wcs._get_coverage_region(d)
            if regions_intersect_ogr_p(self.region, ds_region):
                chs_url = chs_wcs._get_coverage_url(
                    chs_wcs.fix_coverage_id(surv['ID']), region=self.region)
                outf = '{}_{}.tif'.format(
                    surv['ID'].replace(' ', '_').replace('caris__', 'chs_'),
                    self.region.format('fn'))
                # join with the module output directory like every sibling run()
                self.results.append([
                    chs_url,
                    os.path.join(self._outdir, outf),
                    surv['DataType']
                ])
def __init__(self, where=None, **kwargs):
    """Set up the CHS (Canadian Hydrographic Service) NONNA fetch module.

    Args:
      where: optional list of FRED filter clauses (default None -> empty
        list; was a mutable default `[]`, shared across calls — fixed)

    Fixes: `self._pdate_if_not_in_FRED()` was a typo that raised
    AttributeError on construction; the sibling modules (e.g. emodnet)
    call `update_if_not_in_FRED()`.
    """
    super().__init__(**kwargs)
    self._chs_url = 'https://data.chs-shc.ca/geoserver/wcs?'
    self._chs_grid_cap = 'https://data.chs-shc.ca/geoserver/wcs?request=GetCapabilities&service=WMS'
    self._chs_info_url = 'https://open.canada.ca/data/en/dataset/d3881c4c-650d-4070-bf9b-1e00aabf0a1d'
    self._chs_api_url = "https://geoportal.gc.ca/arcgis/rest/services/FGP/CHS_NONNA_100/MapServer/0/query?"
    self._outdir = os.path.join(os.getcwd(), 'chs')
    # avoid the shared mutable-default-argument pitfall
    self.where = [] if where is None else where
    self.FRED = FRED.FRED(verbose=self.verbose)
    self.name = 'chs'
    self._info = '''CHS NONNA 10m and 100m Bathymetric Survey Grids; Non-Navigational gridded bathymetric data based on charts and soundings.'''
    self._title = '''Bathymetric data from CHS'''
    self._usage = '''< chs >'''
    self._urls = [self._chs_info_url, self._chs_url, self._chs_grid_cap]
    # BUG FIX: was `self._pdate_if_not_in_FRED()` (AttributeError)
    self.update_if_not_in_FRED()
def run(self):
    """Run the COPERNICUS DEM fetching module."""
    if self.datatype is not None:
        # narrow the FRED filter to the requested data type
        self.where.append("DataType = '{}'".format(self.datatype))
    for survey in FRED._filter_FRED(self):
        for link in survey['DataLink'].split(','):
            # strip any query string from the remote filename
            local_name = link.split('/')[-1].split('?')[0]
            self.results.append(
                [link, os.path.join(self._outdir, local_name), survey['DataType']]
            )
    #self.results.append([i, i.split('/')[-1].split('?')[0], surv['DataType']])
    return (self)
def __init__(self, where=None, **kwargs):
    """Set up the EMODNET bathymetry fetch module.

    Args:
      where: optional list of FRED filter clauses (default None -> empty
        list; was a mutable default `[]`, shared across calls — fixed)
    """
    super().__init__(**kwargs)
    self._emodnet_grid_url = 'https://ows.emodnet-bathymetry.eu/wcs?'
    self._emodnet_grid_cap = 'https://ows.emodnet-bathymetry.eu/wms?request=GetCapabilities&service=WMS'
    self._emodnet_help_url = 'https://portal.emodnet-bathymetry.eu/help/help.html'
    self._outdir = os.path.join(os.getcwd(), 'emodnet')
    # avoid the shared mutable-default-argument pitfall
    self.where = [] if where is None else where
    self.FRED = FRED.FRED(verbose=self.verbose)
    self.name = 'emodnet'
    self._info = 'European Bathymetry/Topographic data from EMODNET'
    self._title = '''EMODNET Elevation Data.'''
    self._usage = '''< emodnet >'''
    self._urls = [
        self._emodnet_help_url,
        self._emodnet_grid_url,
        self._emodnet_grid_cap
    ]
    self.update_if_not_in_FRED()
def __init__(self, where='', datatype=None, **kwargs):
    """Set up the NOAA nautical charts (ENC/RNC) fetch module.

    Args:
      where: optional single FRED filter clause ('' for none)
      datatype: 'ENC' or 'RNC' to restrict results
    """
    super().__init__(**kwargs)
    self._charts_url = 'https://www.charts.noaa.gov/'
    self._enc_data_catalog = 'https://charts.noaa.gov/ENCs/ENCProdCat_19115.xml'
    self._rnc_data_catalog = 'https://charts.noaa.gov/RNCs/RNCProdCat_19115.xml'
    self._urls = [self._enc_data_catalog, self._rnc_data_catalog]
    self._outdir = os.path.join(os.getcwd(), 'charts')
    # map each chart data-type to its product catalog
    self._dt_xml = {
        'ENC': self._enc_data_catalog,
        'RNC': self._rnc_data_catalog,
    }
    self.where = [where] if where else []
    self.datatype = datatype
    self.name = 'charts'
    # chart soundings are referenced to mean high water
    self.v_datum = 'mhw'
    self.FRED = FRED.FRED(name=self.name, verbose=self.verbose)
    self.update_if_not_in_FRED()
def __init__(self, where='', datatype=None, **kwargs):
    """Set up the Copernicus DEM fetch module (COP30 mirror + EU COP10).

    Args:
      where: optional single FRED filter clause ('' for none)
      datatype: restrict results to a data type
    """
    super().__init__(**kwargs)
    self.cop30_rurl = 'https://opentopography.s3.sdsc.edu/minio/raster/COP30/COP30_hh/'
    self.cop30_url = 'https://opentopography.s3.sdsc.edu/minio/download/raster/COP30/COP30_hh/'
    self.cop30_vrt_url = 'https://opentopography.s3.sdsc.edu/minio/download/raster/COP30/COP30_hh.vrt?token='
    self.cop_10_url = 'https://gisco-services.ec.europa.eu/dem/copernicus/outD/'
    self.cop_10_aux_url = 'https://gisco-services.ec.europa.eu/dem/copernicus/outA/'
    self.cop_10_web = 'https://ec.europa.eu/eurostat/web/gisco/geodata/reference-data/elevation/copernicus-dem/elevation'
    self.where = [where] if where else []
    self.datatype = datatype
    self._outdir = os.path.join(os.getcwd(), 'copernicus')
    self.name = 'copernicus'
    self.FRED = FRED.FRED(name=self.name, verbose=self.verbose)
    # browser-like headers; the mirror rejects the default urllib agent
    self.headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:89.0) Gecko/20100101 Firefox/89.0',
        'referer': 'https://opentopography.s3.sdsc.edu/minio/raster/COP30/COP30_hh/'
    }
    self.update_if_not_in_FRED()
def run(self):
    """Fetch each matching survey's footprint index, intersect its features
    with the request region, and append the matching DTM ftp links.

    Fixes: `v_zip` was referenced before assignment (NameError on the first
    survey); `data_link` was tested with `if data_link is not None` before it
    was ever assigned (NameError on the first feature). The zip name is now
    derived from the survey's IndexLink (as the arcticdem module does) and
    data_link is assigned inside the intersection test.
    """
    for surv in FRED._filter_FRED(self):
        # BUG FIX: v_zip was previously undefined here
        v_zip = os.path.basename(surv['IndexLink'])
        status = f_utils.Fetch(surv['IndexLink']).fetch_ftp_file(
            v_zip, verbose=self.verbose)
        v_shps = utils.p_unzip(v_zip, ['shp', 'shx', 'dbf', 'prj'])
        v_shp = None
        for v in v_shps:
            if v.split('.')[-1] == 'shp':
                v_shp = v
                break
        try:
            v_ds = ogr.Open(v_shp)
        except Exception:
            v_ds = None
            status = -1
        if v_ds is not None:
            layer = v_ds.GetLayer()
            # NOTE: the original applied a FRED attribute filter referencing
            # an undefined `name` inside try/except — it always raised and
            # was swallowed, so it has been removed as dead code.
            fcount = layer.GetFeatureCount()
            for f in range(0, fcount):
                feature = layer[f]
                geom = feature.GetGeometryRef()
                if geom.Intersects(self.region.export_as_geom()):
                    # BUG FIX: assign data_link before use
                    data_link = feature.GetField('Ftp_dtm').replace(
                        'http', 'ftp')
                    self.results.append([
                        data_link,
                        os.path.join(self._outdir, data_link.split('/')[-1]),
                        surv['DataType']
                    ])
        utils.remove_glob(v_zip, *v_shps)
def run(self):
    """Fetch the ArcticDEM tile index, reproject it to EPSG:4326, intersect
    its tiles with the request region, and append the matching file urls.

    Fixes: removed a leftover debug `print(self.arctic_region)`; narrowed
    bare `except:` clauses to `except Exception:`; removed the dead
    attribute-filter block that referenced an undefined `name` (it always
    raised and was silently swallowed).
    """
    for surv in FRED._filter_FRED(self):
        v_zip = os.path.basename(self._arctic_dem_index_url)
        status = f_utils.Fetch(
            self._arctic_dem_index_url, verbose=self.verbose).fetch_file(v_zip)
        v_shps = utils.p_unzip(v_zip, ['shp', 'shx', 'dbf', 'prj'])
        v_shp = None
        for v in v_shps:
            if v.split('.')[-1] == 'shp':
                v_shp = v
                break
        # the index ships in a polar projection; warp it to WGS84 so it can
        # be intersected with the (geographic) request region
        utils.run_cmd(
            'ogr2ogr arctic_tmp.shp {} -t_srs epsg:4326'.format(v_shp),
            verbose=True)
        utils.remove_glob(v_zip, *v_shps)
        v_shp = 'arctic_tmp.shp'
        v_shps = ['arctic_tmp.shp', 'arctic_tmp.dbf',
                  'arctic_tmp.shx', 'arctic_tmp.prj']
        try:
            v_ds = ogr.Open(v_shp)
        except Exception:
            v_ds = None
            status = -1
        if v_ds is not None:
            layer = v_ds.GetLayer()
            fcount = layer.GetFeatureCount()
            for f in range(0, fcount):
                feature = layer[f]
                geom = feature.GetGeometryRef()
                if geom.Intersects(self.region.export_as_geom()):
                    data_link = feature.GetField('fileurl')
                    self.results.append([
                        data_link,
                        os.path.join(self._outdir, data_link.split('/')[-1]),
                        surv['DataType']
                    ])
        utils.remove_glob(*v_shps)
    return(self)
def run(self):
    """Fetch each matching survey's tile index shapefile, intersect its
    tiles with the request region, and append each intersecting tile's URL
    to self.results as [url, local-path, datatype].
    """
    if self.datatype is not None:
        # narrow the FRED filter to the requested data type
        self.where.append("DataType LIKE '%{}%'".format(self.datatype))
    for surv in FRED._filter_FRED(self):
        # allow the user callback to abort the search early
        if self.callback():
            break
        surv_shp_zip = os.path.basename(surv['IndexLink'])
        # fetch the survey's tile-index zip; 0 indicates success
        if f_utils.Fetch(
                surv['IndexLink'],
                callback=self.callback,
                verbose=self.verbose).fetch_file(surv_shp_zip) == 0:
            v_shps = utils.p_unzip(surv_shp_zip, ['shp', 'shx', 'dbf', 'prj'])
            v_shp = None
            # locate the .shp among the unzipped components
            for v in v_shps:
                if v.split('.')[-1] == 'shp':
                    v_shp = v
            #try:
            v_ds = ogr.Open(v_shp)
            slay1 = v_ds.GetLayer(0)
            for sf1 in slay1:
                geom = sf1.GetGeometryRef()
                # keep only tiles intersecting the request region
                if geom.Intersects(self.region.export_as_geom()):
                    tile_url = sf1.GetField('URL').strip()
                    # local path is nested under the survey ID
                    self.results.append([
                        tile_url,
                        os.path.join(
                            self._outdir,
                            '{}/{}'.format(surv['ID'], tile_url.split('/')[-1])),
                        surv['DataType']
                    ])
            # release OGR handles before deleting the files they reference
            v_ds = slay1 = None
            #except: pass
            utils.remove_glob(surv_shp_zip, *v_shps)
def run(self): #, f = None, e = None, q = None):
    """parse the tnm results from FRED

    Pages through the TNM products API (100 items per request) for each
    FRED-matched dataset and appends [url, local-path, datatype] results.

    Fixes: the exception handler used `except Exception as e`, shadowing
    the extents list `e`; Python 3 deletes the `as` target when the handler
    exits, so any API error left `e` unbound and the later `len(e) > 0`
    check raised NameError. The handler variable is renamed to `err`.
    """
    e = self.extents.split(',') if self.extents is not None else None
    f = self.formats.split(',') if self.formats is not None else None
    q = self.q
    for surv in FRED._filter_FRED(self):
        offset = 0
        total = 0
        while True:
            _dataset_results = []
            _data = {
                'bbox': self.region.format('bbox'),
                'max': 100,
                'offset': offset
            }
            if q is not None:
                _data['q'] = str(q)
            if f is None:
                # fall back to the format recorded in FRED for this dataset
                _data['prodFormats'] = surv['Etcetra']
            else:
                _data['prodFormats'] = ','.join(f)
            if e is None:
                e = []
            _req = f_utils.Fetch(surv['DataLink']).fetch_req(params=_data)
            if _req is not None and _req.status_code == 200:
                try:
                    _dataset_results = _req.json()
                    total = _dataset_results['total']
                except ValueError:
                    utils.echo_error_msg('tnm server error, try again')
                except Exception as err:
                    # BUG FIX: was `as e`, which clobbered the extents list
                    utils.echo_error_msg('error, {}'.format(err))
            if len(_dataset_results) > 0:
                for item in _dataset_results['items']:
                    if _data['prodFormats'] is None:
                        fmts = []
                    else:
                        fmts = _data['prodFormats'].split(',')
                    f_url = None
                    if len(e) > 0:
                        # only keep items matching one of the requested extents
                        for extent in e:
                            if item['extent'] == extent:
                                for fmt in fmts:
                                    if fmt in item['urls'].keys():
                                        f_url = item['urls'][fmt]
                                        break
                                if f_url is None:
                                    f_url = item['downloadURL']
                                self.results.append([
                                    f_url,
                                    os.path.join(
                                        self._outdir, f_url.split('/')[-1]),
                                    surv['DataType']
                                ])
                    else:
                        for fmt in fmts:
                            if fmt in item['urls'].keys():
                                f_url = item['urls'][fmt]
                                break
                        if f_url is None:
                            f_url = item['downloadURL']
                        self.results.append([
                            f_url,
                            os.path.join(self._outdir, f_url.split('/')[-1]),
                            surv['DataType']
                        ])
            offset += 100
            if offset >= total:
                break
    return (self)