def run(self):
    """Run the NASADEM DEM fetching module.

    Appends one [url, local_path, datatype] entry per DataLink of every
    survey that passes the FRED filter, then returns self for chaining.
    """
    for survey in FRED._filter_FRED(self):
        for link in survey['DataLink'].split(','):
            # strip any trailing query string when deriving the local name
            local_name = link.split('/')[-1].split('?')[0]
            self.results.append(
                [link, os.path.join(self._outdir, local_name), survey['DataType']]
            )
    return self
def run(self):
    """Search for data in the reference vector file.

    Optionally narrows the FRED filter by self.datatype, then appends one
    [url, local_path, datatype] entry per DataLink of every matching survey.
    """
    if self.datatype is not None:
        self.where.append("DataType = '{}'".format(self.datatype))
    for survey in FRED._filter_FRED(self):
        for link in survey['DataLink'].split(','):
            local_path = os.path.join(self._outdir, link.split('/')[-1])
            self.results.append([link, local_path, survey['DataType']])
def _parse_prods_results(self, r, f=None, e=None, q=None):
    """Append each non-empty DataLink of the filtered FRED surveys to
    self.results as [url, local_path, datatype].

    The r/f/e/q parameters are accepted for interface compatibility but
    are not used here.
    """
    for survey in FRED._filter_FRED(self):
        for link in (d for d in survey['DataLink'].split(',') if d != ''):
            self.results.append([
                link,
                os.path.join(self._outdir, link.split('/')[-1]),
                survey['DataType'],
            ])
def run(self):
    """Search the NOS reference vector and append the results to the
    results list.

    When self.datatype is set, only links whose detected data type
    contains it (case-insensitive) are kept; otherwise every non-empty
    link is appended.
    """
    for survey in FRED._filter_FRED(self):
        for link in survey['DataLink'].split(','):
            if link == '':
                continue
            dt = self._data_type(link)
            # no filter requested, or the filter matches -> keep the link
            if self.datatype is None or self.datatype.lower() in dt:
                self.results.append([
                    link,
                    os.path.join(self._outdir, link.split('/')[-1]),
                    survey['DataType'],
                ])
def run(self):
    """Collect EMODNET coverage URLs for surveys intersecting the
    request region.

    Describes each survey's WCS coverage, skips those with no
    description or no regional overlap, and appends
    [coverage_url, local_path, datatype] for the rest.
    """
    emod_wcs = f_utils.WCS(self._emodnet_grid_url)
    for survey in FRED._filter_FRED(self):
        coverage = emod_wcs._describe_coverage(survey['ID'])
        if coverage is None:
            continue
        ds_region = emod_wcs._get_coverage_region(coverage)
        if not regions_intersect_ogr_p(self.region, ds_region):
            continue
        emod_url = emod_wcs._get_coverage_url(survey['ID'], region=self.region)
        outf = 'emodnet_{}.tif'.format(self.region.format('fn'))
        self.results.append(
            [emod_url, os.path.join(self._outdir, outf), survey['DataType']]
        )
def run(self):
    """Search for data in the reference vector file.

    Appends one [url, local_path, datatype] entry per non-empty
    DataLink of every survey that passes the FRED filter.
    """
    for survey in FRED._filter_FRED(self):
        for link in survey['DataLink'].split(','):
            if link == '':
                continue
            self.results.append([
                link,
                os.path.join(self._outdir, link.split('/')[-1]),
                survey['DataType'],
            ])
def run(self):
    """Collect CHS coverage URLs for surveys intersecting the request
    region.

    Describes each survey's WCS coverage, skips those with no
    description or no regional overlap, and appends
    [coverage_url, local_path, datatype] for the rest.
    """
    chs_wcs = f_utils.WCS(self._chs_url)
    for surv in FRED._filter_FRED(self):
        d = chs_wcs._describe_coverage(surv['ID'])
        if d is not None:
            ds_region = chs_wcs._get_coverage_region(d)
            if regions_intersect_ogr_p(self.region, ds_region):
                chs_url = chs_wcs._get_coverage_url(
                    chs_wcs.fix_coverage_id(surv['ID']), region=self.region)
                outf = '{}_{}.tif'.format(
                    surv['ID'].replace(' ', '_').replace('caris__', 'chs_'),
                    self.region.format('fn'))
                # FIX: join with the module output directory — every sibling
                # fetch module stores os.path.join(self._outdir, ...) as the
                # local path; the original appended the bare filename here.
                self.results.append([
                    chs_url,
                    os.path.join(self._outdir, outf),
                    surv['DataType'],
                ])
def run(self):
    """Run the COPERNICUS DEM fetching module.

    Optionally narrows the FRED filter by self.datatype, appends one
    [url, local_path, datatype] entry per DataLink, and returns self.
    """
    if self.datatype is not None:
        self.where.append("DataType = '{}'".format(self.datatype))
    for survey in FRED._filter_FRED(self):
        for link in survey['DataLink'].split(','):
            # drop any trailing query string from the remote file name
            local_name = link.split('/')[-1].split('?')[0]
            self.results.append(
                [link, os.path.join(self._outdir, local_name), survey['DataType']]
            )
    return self
def run(self):
    """Fetch each survey's index shapefile over FTP and collect DTM
    download links for features intersecting the request region.

    Appends [ftp_url, local_path, datatype] to self.results for every
    intersecting feature; the downloaded index files are removed when
    each survey has been processed.
    """
    for surv in FRED._filter_FRED(self):
        # FIX: derive the local zip name from the index link — the
        # original referenced v_zip before assignment (NameError).
        v_zip = os.path.basename(surv['IndexLink'])
        status = f_utils.Fetch(surv['IndexLink']).fetch_ftp_file(
            v_zip, verbose=self.verbose)
        v_shps = utils.p_unzip(v_zip, ['shp', 'shx', 'dbf', 'prj'])
        v_shp = None
        for v in v_shps:
            if v.split('.')[-1] == 'shp':
                v_shp = v
                break
        try:
            v_ds = ogr.Open(v_shp)
        except:
            v_ds = None
            status = -1
        if v_ds is not None:
            layer = v_ds.GetLayer()
            # NOTE(review): `name` is undefined here; the bare except makes
            # this filter a silent no-op — confirm the intended field value.
            try:
                self.FRED.layer.SetAttributeFilter(
                    "Name = '{}'".format(name))
            except:
                pass
            fcount = layer.GetFeatureCount()
            for f in range(0, fcount):
                feature = layer[f]
                # FIX: the original guarded on `data_link is not None`
                # before data_link was ever bound (NameError); the sibling
                # ArcticDEM module has the same guard commented out.
                geom = feature.GetGeometryRef()
                if geom.Intersects(self.region.export_as_geom()):
                    # prefer FTP transport for the DTM link
                    data_link = feature.GetField('Ftp_dtm').replace(
                        'http', 'ftp')
                    self.results.append([
                        data_link,
                        os.path.join(
                            self._outdir, data_link.split('/')[-1]),
                        surv['DataType'],
                    ])
        utils.remove_glob(v_zip, *v_shps)
def run(self):
    """Fetch the ArcticDEM tile index, reproject it to EPSG:4326, and
    collect 'fileurl' download links for tiles intersecting the region.

    Appends [url, local_path, datatype] to self.results and returns
    self. Temporary index/shapefile artifacts are removed as it goes.
    """
    print(self.arctic_region)
    for surv in FRED._filter_FRED(self):
        # download the tile-index zip into the working directory
        v_zip = os.path.basename(self._arctic_dem_index_url)
        status = f_utils.Fetch(self._arctic_dem_index_url, verbose=self.verbose).fetch_file(v_zip)
        v_shps = utils.p_unzip(v_zip, ['shp', 'shx', 'dbf', 'prj'])
        v_shp = None
        for v in v_shps:
            if v.split('.')[-1] == 'shp':
                v_shp = v
                break
        # reproject the index to WGS84 so it can be intersected with self.region
        utils.run_cmd('ogr2ogr arctic_tmp.shp {} -t_srs epsg:4326'.format(v_shp), verbose=True)
        utils.remove_glob(v_zip, *v_shps)
        v_shp = 'arctic_tmp.shp'
        v_shps = ['arctic_tmp.shp','arctic_tmp.dbf','arctic_tmp.shx','arctic_tmp.prj']
        try:
            v_ds = ogr.Open(v_shp)
        except:
            v_ds = None
            status = -1
        if v_ds is not None:
            layer = v_ds.GetLayer()
            # NOTE(review): `name` is undefined here; the bare except makes
            # this filter a silent no-op — confirm the intended field value.
            try:
                self.FRED.layer.SetAttributeFilter("Name = '{}'".format(name))
            except:
                pass
            fcount = layer.GetFeatureCount()
            for f in range(0, fcount):
                feature = layer[f]
                #if data_link is not None:
                geom = feature.GetGeometryRef()
                if geom.Intersects(self.region.export_as_geom()):
                    #data_link = feature.GetField('Ftp_dtm').replace('http', 'ftp')
                    data_link = feature.GetField('fileurl')
                    self.results.append([data_link, os.path.join(self._outdir, data_link.split('/')[-1]), surv['DataType']])
        utils.remove_glob(*v_shps)
    return(self)
def run(self):
    """Fetch each survey's tile-index shapefile and collect tile URLs
    whose footprints intersect the request region.

    Optionally narrows the FRED filter by self.datatype (LIKE match);
    honors self.callback for early cancellation; cleans up the fetched
    index files after each survey.
    """
    if self.datatype is not None:
        self.where.append("DataType LIKE '%{}%'".format(self.datatype))
    for surv in FRED._filter_FRED(self):
        if self.callback():
            break
        surv_shp_zip = os.path.basename(surv['IndexLink'])
        fetch_status = f_utils.Fetch(
            surv['IndexLink'], callback=self.callback,
            verbose=self.verbose).fetch_file(surv_shp_zip)
        if fetch_status == 0:
            v_shps = utils.p_unzip(surv_shp_zip, ['shp', 'shx', 'dbf', 'prj'])
            v_shp = None
            for candidate in v_shps:
                if candidate.split('.')[-1] == 'shp':
                    v_shp = candidate
            v_ds = ogr.Open(v_shp)
            slay1 = v_ds.GetLayer(0)
            for sf1 in slay1:
                geom = sf1.GetGeometryRef()
                if geom.Intersects(self.region.export_as_geom()):
                    tile_url = sf1.GetField('URL').strip()
                    # tiles are stored under a per-survey subdirectory
                    local_path = os.path.join(
                        self._outdir,
                        '{}/{}'.format(surv['ID'], tile_url.split('/')[-1]))
                    self.results.append(
                        [tile_url, local_path, surv['DataType']])
            v_ds = slay1 = None
            utils.remove_glob(surv_shp_zip, *v_shps)
def run(self):
    """Parse The National Map (TNM) results from FRED.

    Pages through the TNM products API (100 items per request) for each
    filtered survey, resolving a download URL per item — preferring a
    requested product format, falling back to the item's downloadURL —
    and appends [url, local_path, datatype] to self.results. Returns
    self for chaining.
    """
    def _resolve_url(item, fmts):
        # prefer the first requested format present; else the default URL
        for fmt in fmts:
            if fmt in item['urls'].keys():
                return item['urls'][fmt]
        return item['downloadURL']

    extents = self.extents.split(',') if self.extents is not None else []
    formats = self.formats.split(',') if self.formats is not None else None
    q = self.q
    for surv in FRED._filter_FRED(self):
        offset = 0
        total = 0
        while True:
            _dataset_results = []
            _data = {
                'bbox': self.region.format('bbox'),
                'max': 100,
                'offset': offset,
            }
            if q is not None:
                _data['q'] = str(q)
            if formats is None:
                _data['prodFormats'] = surv['Etcetra']
            else:
                _data['prodFormats'] = ','.join(formats)
            _req = f_utils.Fetch(surv['DataLink']).fetch_req(params=_data)
            if _req is not None and _req.status_code == 200:
                try:
                    _dataset_results = _req.json()
                    total = _dataset_results['total']
                except ValueError:
                    utils.echo_error_msg('tnm server error, try again')
                except Exception as exc:
                    # FIX: the original bound this exception to `e`,
                    # clobbering the extents list; Python 3 then unbinds
                    # the name after the handler, so the next pass raised
                    # NameError on len(e).
                    utils.echo_error_msg('error, {}'.format(exc))
            if len(_dataset_results) > 0:
                for item in _dataset_results['items']:
                    if _data['prodFormats'] is None:
                        fmts = []
                    else:
                        fmts = _data['prodFormats'].split(',')
                    if len(extents) > 0:
                        # only keep items whose extent matches a request
                        for extent in extents:
                            if item['extent'] == extent:
                                f_url = _resolve_url(item, fmts)
                                self.results.append([
                                    f_url,
                                    os.path.join(
                                        self._outdir, f_url.split('/')[-1]),
                                    surv['DataType'],
                                ])
                    else:
                        f_url = _resolve_url(item, fmts)
                        self.results.append([
                            f_url,
                            os.path.join(self._outdir, f_url.split('/')[-1]),
                            surv['DataType'],
                        ])
            offset += 100
            if offset >= total:
                break
    return self