def run(self, *args, **kwargs):
    """ Run the command: Acquire Australian soils data.

        Arguments:
        verbose -- boolean    Produce verbose output. Default: False.
        overwrite -- boolean  Overwrite existing output. Default: False.
    """
    verbose = kwargs.get('verbose', False)
    overwrite = kwargs.get('overwrite', False)

    self.checkMetadata()

    bbox = bboxFromString(self.studyArea['bbox_wgs84'])

    try:
        rasters = getSoilsRasterDataForBoundingBox(self.context.config, self.context.projectDir, bbox,
                                                   srs=self.studyArea['dem_srs'],
                                                   resx=self.studyArea['dem_res_x'],
                                                   resy=self.studyArea['dem_res_y'],
                                                   overwrite=overwrite, verbose=verbose, outfp=self.outfp)
    except Exception as e:
        traceback.print_exc(file=self.outfp)
        raise RunException(e)

    # Write metadata entries
    cmdline = GenericMetadata.getCommandLine()
    for attr in list(rasters.keys()):
        (filepath, url) = rasters[attr]
        filename = os.path.basename(filepath)
        asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
        asset.name = attr
        asset.dcIdentifier = filename
        asset.dcSource = url
        asset.dcTitle = attr
        asset.dcPublisher = soilwcs.DC_PUBLISHER
        asset.dcDescription = cmdline
        asset.writeToMetadata(self.context)

    # Write processing history
    GenericMetadata.appendProcessingHistoryItem(self.context, cmdline)
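The run() method above is driven entirely by keyword arguments, so a caller needs only a project Context and the two flags documented in the docstring. A minimal, hedged sketch follows; the command class name (GetAustralianSoilsData) and the project path are illustrative assumptions, not the library's confirmed API, and the project's study-area metadata is assumed to already hold 'bbox_wgs84', 'dem_srs', and the DEM resolutions.

# Hedged, illustrative driver for the Australian soils command above.
# GetAustralianSoilsData and the project path are assumptions for illustration only.
import sys
from ecohydrolib.context import Context  # Context(projectDir, configFile) as used in the scripts below

context = Context('/path/to/projectDir', None)  # None: assumed to fall back to an environment-supplied config
soilsCommand = GetAustralianSoilsData(context)  # assumed command class exposing the run() shown above
try:
    soilsCommand.run(verbose=True, overwrite=False)
except Exception as e:  # run() re-raises failures as RunException
    sys.exit(str(e))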
                   s_srs=t_srs, t_srs=t_srs,
                   trX=demResolutionX, trY=demResolutionY)
    sys.stdout.write('done\n')
else:
    shutil.move(tmpDEMFilepath, demFilepath)

# Write metadata
GenericMetadata.writeStudyAreaEntry(context, 'dem_res_x', demResolutionX)
GenericMetadata.writeStudyAreaEntry(context, 'dem_res_y', demResolutionY)
GenericMetadata.writeStudyAreaEntry(context, 'dem_srs', t_srs)

# Get rows and columns for DEM
(columns, rows) = getDimensionsForRaster(demFilepath)
GenericMetadata.writeStudyAreaEntry(context, 'dem_columns', columns)
GenericMetadata.writeStudyAreaEntry(context, 'dem_rows', rows)

# Write provenance
asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
asset.name = 'dem'
asset.dcIdentifier = demFilename
asset.dcSource = demURL
asset.dcTitle = demwcs.COVERAGE_DESC[args.demType]
asset.dcPublisher = 'Geoscience Australia'
asset.dcDescription = cmdline
asset.writeToMetadata(context)

# Write processing history
GenericMetadata.appendProcessingHistoryItem(context, cmdline)

# Clean-up
deleteGeoTiff(tmpDEMFilepath)
def run(self, *args, **kwargs):
    """ Run the command: Acquire USGS DEM data.

        Arguments:
        coverage -- string      Source dataset from which DEM tile should be extracted.
        outfile -- string       The name of the DEM file to be written. File extension ".tif" will be added.
        demResolution -- list<float>[2]  Two floating point numbers representing the desired X and Y
                                output resolution of the DEM; unit: meters.
        srs -- string           Target spatial reference system of output, in EPSG:num format.
        verbose -- boolean      Produce verbose output. Default: False.
        overwrite -- boolean    Overwrite existing output. Default: False.
    """
    coverage = kwargs.get('coverage', self.DEFAULT_COVERAGE)
    outfile = kwargs.get('outfile', None)
    demResolution = kwargs.get('demResolution', None)
    srs = kwargs.get('srs', None)
    verbose = kwargs.get('verbose', False)
    overwrite = kwargs.get('overwrite', False)

    self.checkMetadata()

    bbox = bboxFromString(self.studyArea['bbox_wgs84'])

    if not outfile:
        outfile = 'DEM'
    demFilename = "%s.tif" % (outfile)

    demResolutionX = demResolutionY = None
    if demResolution:
        demResolutionX = demResolution[0]
        demResolutionY = demResolution[1]

    if srs:
        if not isValidSrs(srs):
            msg = "ERROR: '%s' is not a valid spatial reference. Spatial reference must be of the form 'EPSG:XXXX', e.g. 'EPSG:32617'. For more information, see: http://www.spatialreference.org/" % (srs,)
            raise RunException(msg)
    else:
        # Default to the UTM zone of the bounding box center
        (centerLon, centerLat) = calculateBoundingBoxCenter(bbox)
        (utmZone, isNorth) = getUTMZoneFromCoordinates(centerLon, centerLat)
        srs = getEPSGStringForUTMZone(utmZone, isNorth)

    try:
        (dataFetched, urlFetched) = ecohydrolib.usgs.demwcs.getDEMForBoundingBox(self.context.config,
            self.context.projectDir, demFilename, bbox, srs, coverage=coverage,
            resx=demResolutionX, resy=demResolutionY, scale=0.01,
            overwrite=overwrite, verbose=verbose, outfp=self.outfp)
    except Exception as e:
        traceback.print_exc(file=self.outfp)
        raise RunException(e)
    if not dataFetched:
        raise RunException("Failed to download DEM data from URL {0}".format(urlFetched))

    # Write metadata entries
    cmdline = GenericMetadata.getCommandLine()
    demFilepath = os.path.join(self.context.projectDir, demFilename)
    demSrs = getSpatialReferenceForRaster(demFilepath)
    GenericMetadata.writeStudyAreaEntry(self.context, 'dem_res_x', demSrs[0])
    GenericMetadata.writeStudyAreaEntry(self.context, 'dem_res_y', demSrs[1])
    GenericMetadata.writeStudyAreaEntry(self.context, 'dem_srs', srs)

    # Get rows and columns for DEM
    (columns, rows) = getDimensionsForRaster(demFilepath)
    GenericMetadata.writeStudyAreaEntry(self.context, 'dem_columns', columns)
    GenericMetadata.writeStudyAreaEntry(self.context, 'dem_rows', rows)

    # Write provenance
    asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
    asset.name = 'dem'
    asset.dcIdentifier = demFilename
    asset.dcSource = urlFetched
    asset.dcTitle = "Digital Elevation Model ({0})".format(coverage)
    asset.dcPublisher = 'U.S. Geological Survey'
    asset.dcDescription = cmdline
    asset.processingNotes = "Elevation values rescaled from centimeters to meters. "
    asset.processingNotes += "Spatial grid resampled to {srs} with X resolution {xres} and Y resolution {yres}."
    asset.processingNotes = asset.processingNotes.format(srs=srs, xres=demSrs[0], yres=demSrs[1])
    asset.writeToMetadata(self.context)

    # Write processing history
    GenericMetadata.appendProcessingHistoryItem(self.context, cmdline)
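For the DEM command the same calling pattern applies. The sketch below is hedged: only the keyword names come from the docstring above, while the class name (GetUSGSDEMForBoundingbox), the coverage identifier, and the paths are illustrative assumptions.

# Illustrative only: class name, coverage value, and paths are assumptions.
from ecohydrolib.context import Context

context = Context('/path/to/projectDir', None)
demCommand = GetUSGSDEMForBoundingbox(context)   # assumed command class wrapping the run() above
demCommand.run(coverage='NED_1',                 # hypothetical coverage identifier
               outfile='DEM',                    # written as DEM.tif in the project directory
               demResolution=[30.0, 30.0],       # X and Y resolution in meters
               srs='EPSG:32617',                 # omit to default to the bounding box's UTM zone
               verbose=True,
               overwrite=True)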
def run(self, *args, **kwargs):
    """ Run the command: Acquire NLCD data from USGS WCS web service.

        Arguments:
        lctype -- string      Source dataset from which NLCD tile should be extracted.
        outfile -- string     The name of the NLCD file to be written. File extension ".tif" will be added.
        verbose -- boolean    Produce verbose output. Default: False.
        overwrite -- boolean  Overwrite existing output. Default: False.
    """
    lctype = kwargs.get('lctype', DEFAULT_LC_TYPE)
    outfile = kwargs.get('outfile', None)
    verbose = kwargs.get('verbose', False)
    overwrite = kwargs.get('overwrite', False)

    if lctype not in ecohydrolib.usgs.nlcdwcs.LC_TYPE_TO_COVERAGE:
        msg = "Land cover type {lctype} is not in the list of supported types {types}"
        raise CommandException(msg.format(lctype=lctype,
                                          types=ecohydrolib.usgs.nlcdwcs.LC_TYPE_TO_COVERAGE))

    self.checkMetadata()

    demFilename = self.manifest['dem']
    demFilepath = os.path.join(self.context.projectDir, demFilename)
    demFilepath = os.path.abspath(demFilepath)
    bbox = getRasterExtentAsBbox(demFilepath)

    if not outfile:
        outfile = 'NLCD'

    try:
        (resp, urlFetched, fname) = getNLCDRasterDataForBoundingBox(self.context.config,
            self.context.projectDir, bbox,
            coverage=LC_TYPE_TO_COVERAGE[lctype],
            filename=outfile,
            srs=self.studyArea['dem_srs'],
            resx=self.studyArea['dem_res_x'],
            resy=self.studyArea['dem_res_y'],
            overwrite=overwrite, verbose=verbose, outfp=self.outfp)
    except Exception as e:
        traceback.print_exc(file=self.outfp)
        raise RunException(e)
    if not resp:
        raise RunException("Failed to download NLCD data from URL {0}".format(urlFetched))

    # Write metadata entries
    cmdline = GenericMetadata.getCommandLine()
    GenericMetadata.writeStudyAreaEntry(self.context, "landcover_type", lctype)

    # Write provenance
    asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
    asset.name = 'landcover'
    asset.dcIdentifier = fname
    asset.dcSource = urlFetched
    asset.dcTitle = "The National Landcover Database: {0}".format(lctype)
    asset.dcPublisher = 'USGS'
    asset.dcDescription = cmdline
    asset.writeToMetadata(self.context)

    # Write processing history
    GenericMetadata.appendProcessingHistoryItem(self.context, cmdline)
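Likewise for the NLCD command, a hedged sketch: the class name (GetNLCDForDEMExtent) and the lctype value are assumptions; the only requirement stated by the code above is that lctype be a key of ecohydrolib.usgs.nlcdwcs.LC_TYPE_TO_COVERAGE and that the project manifest already contain a 'dem' entry.

# Illustrative driver; the class name and lctype value are assumptions.
from ecohydrolib.context import Context

context = Context('/path/to/projectDir', None)
nlcdCommand = GetNLCDForDEMExtent(context)       # assumed command class exposing the run() above
nlcdCommand.run(lctype='nlcd2011',               # hypothetical key; must be in LC_TYPE_TO_COVERAGE
                outfile='NLCD',
                verbose=True,
                overwrite=False)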
context = Context(args.projectDir, configFile)

manifest = GenericMetadata.readManifestEntries(context)
if 'soil_features' in manifest:
    if args.overwrite:
        sys.stdout.write('Deleting existing SSURGO features shapefile\n')
        sys.stdout.flush()
        shpFilepath = os.path.join(context.projectDir, manifest['soil_features'])
        deleteShapefile(shpFilepath)
    else:
        sys.exit(textwrap.fill('SSURGO features already exist in project directory. Use --overwrite option to overwrite.'))

shpFilename = args.shpFilename  # 'MapunitPolyExtended.shp'

# Write provenance
asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
asset.name = 'soil_features'
asset.dcIdentifier = shpFilename
asset.dcSource = 'from hydroterre web service with GI'
asset.dcTitle = 'GI with SSURGO soils data'
asset.dcPublisher = 'USDA'
asset.dcDescription = cmdline
asset.writeToMetadata(context)

# Write processing history
GenericMetadata.appendProcessingHistoryItem(context, cmdline)
context = Context(args.projectDir, configFile)

if args.publisher:
    publisher = args.publisher
else:
    publisher = 'SELF PUBLISHED'

if not context.config.has_option('GDAL/OGR', 'PATH_OF_OGR2OGR'):
    sys.exit("Config file %s does not define option %s in section %s" %
             (args.configfile, 'PATH_OF_OGR2OGR', 'GDAL/OGR'))

if not os.access(args.studyAreaLayer, os.R_OK):
    raise IOError(errno.EACCES, "Not allowed to read input study area %s" % (args.studyAreaLayer,))
inStudyAreaPath = os.path.abspath(args.studyAreaLayer)

shpFilename = convertFeatureLayerToShapefile(context.config, context.projectDir, inStudyAreaPath,
                                             "studyarea", overwrite=args.overwrite)

# Write metadata
asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
asset.name = 'study_area_shapefile'
asset.dcIdentifier = shpFilename
asset.dcSource = "file://%s" % (inStudyAreaPath,)
asset.dcTitle = 'Study area shapefile'
asset.dcPublisher = publisher
asset.dcDescription = cmdline
asset.writeToMetadata(context)

# Write processing history
GenericMetadata.appendProcessingHistoryItem(context, cmdline)
manifest = GenericMetadata.readManifestEntries(context)
shpFilename = manifest['soil_features']
layerName = os.path.splitext(shpFilename)[0]

# Get study area parameters
studyArea = GenericMetadata.readStudyAreaEntries(context)
outputrasterresolutionX = studyArea['dem_res_x']
outputrasterresolutionY = studyArea['dem_res_y']

# Truncate attributes to 10 characters because shapefiles rely on ancient technology
sys.stdout.write('Generating soil property maps by rasterizing SSURGO features...')
sys.stdout.flush()
attrList = [elem[:10] for elem in ecohydrolib.ssurgo.attributequery.attributeListNumeric]
rasterFiles = rasterizeSSURGOFeatures(config=context.config, outputDir=context.projectDir,
                                      featureFilename=shpFilename, featureLayername=layerName,
                                      featureAttrList=attrList,
                                      rasterResolutionX=outputrasterresolutionX,
                                      rasterResolutionY=outputrasterresolutionY)
sys.stdout.write('done\n')

# Write metadata entries
for attr in rasterFiles.keys():
    asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
    asset.name = "soil_raster_%s" % (attr,)
    asset.dcIdentifier = rasterFiles[attr]
    asset.dcSource = ssurgoProvenance.dcSource
    asset.dcTitle = attr
    asset.dcPublisher = ssurgoProvenance.dcSource
    asset.dcDescription = cmdline
    asset.writeToMetadata(context)

# Write processing history
GenericMetadata.appendProcessingHistoryItem(context, cmdline)
# Resample DEM to target srs and resolution
resampleRaster(context.config, context.projectDir, tmpDEMFilepath, demFilename,
               s_srs=demtile.DEFAULT_CRS, t_srs=t_srs,
               trX=demResolutionX, trY=demResolutionY)

# Write metadata
GenericMetadata.writeStudyAreaEntry(context, "dem_res_x", demResolutionX)
GenericMetadata.writeStudyAreaEntry(context, "dem_res_y", demResolutionY)
GenericMetadata.writeStudyAreaEntry(context, "dem_srs", t_srs)

# Get rows and columns for DEM
(columns, rows) = getDimensionsForRaster(demFilepath)
GenericMetadata.writeStudyAreaEntry(context, "dem_columns", columns)
GenericMetadata.writeStudyAreaEntry(context, "dem_rows", rows)

# Write provenance
asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
asset.name = 'dem'
asset.dcIdentifier = demFilename
asset.dcSource = 'http://eros.usgs.gov/#/Find_Data/Products_and_Data_Available/gtopo30/hydro/namerica'
asset.dcTitle = 'Digital Elevation Model from HYDRO1k'
asset.dcPublisher = 'USGS'
asset.dcDescription = cmdline
asset.writeToMetadata(context)

# Write processing history
GenericMetadata.appendProcessingHistoryItem(context, cmdline)

# Clean-up
deleteGeoTiff(tmpDEMFilepath)
rasterize.deleteSoilRasters(context, manifest)
sys.stdout.write('done\n')

# Get study area parameters
studyArea = GenericMetadata.readStudyAreaEntries(context)
outputrasterresolutionX = studyArea['dem_res_x']
outputrasterresolutionY = studyArea['dem_res_y']

# Truncate attributes to 10 characters because shapefiles rely on ancient technology
sys.stdout.write('Generating soil property maps by rasterizing SSURGO features...')
sys.stdout.flush()
attrList = [elem[:10] for elem in rasterize.RASTER_ATTRIBUTES]
rasterFiles = rasterize.rasterizeSSURGOFeatures(config=context.config, outputDir=context.projectDir,
                                                featureFilename=shpFilename, featureLayername=layerName,
                                                featureAttrList=attrList,
                                                rasterResolutionX=outputrasterresolutionX,
                                                rasterResolutionY=outputrasterresolutionY)
sys.stdout.write('done\n')

# Write metadata entries
for attr in list(rasterFiles.keys()):
    asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
    asset.name = "soil_raster_%s" % (attr,)
    asset.dcIdentifier = rasterFiles[attr]
    asset.dcSource = ssurgoProvenance.dcSource
    asset.dcTitle = attr
    asset.dcPublisher = ssurgoProvenance.dcSource
    asset.dcDescription = cmdline
    asset.writeToMetadata(context)

# Write processing history
GenericMetadata.appendProcessingHistoryItem(context, cmdline)
demFilename = manifest['dem']
demFilepath = os.path.join(context.projectDir, demFilename)
layerName = os.path.splitext(shpFilename)[0]

# Get study area parameters
studyArea = GenericMetadata.readStudyAreaEntries(context)
outputrasterresolutionX = studyArea['dem_res_x']
outputrasterresolutionY = studyArea['dem_res_y']

# Truncate attributes to 10 characters because shapefiles rely on ancient technology
sys.stdout.write('Generating soil property maps using SOLIM...')
sys.stdout.flush()
attrList = [elem[:10] for elem in attributequery.ATTRIBUTE_LIST_NUMERIC]
rasterFiles = inferSoilPropertiesForSSURGOAndTerrainData(config=context.config, outputDir=context.projectDir,
                                                         shpFilepath=shpFilepath, demFilepath=demFilepath,
                                                         featureAttrList=attrList)
sys.stdout.write('done\n')

# Write metadata entries
for attr in list(rasterFiles.keys()):
    asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
    asset.name = "soil_raster_%s" % (attr,)
    asset.dcIdentifier = rasterFiles[attr]
    asset.dcSource = 'http://solim.geography.wisc.edu'
    asset.dcTitle = attr
    asset.dcPublisher = 'Department of Geography, UW-Madison'
    asset.dcDescription = cmdline
    asset.writeToMetadata(context)

# Write processing history
GenericMetadata.appendProcessingHistoryItem(context, cmdline)
                   s_srs=t_srs, t_srs=t_srs,
                   trX=demResolutionX, trY=demResolutionY)
    sys.stdout.write('done\n')
else:
    shutil.move(tmpDEMFilepath, demFilepath)

# Write metadata
GenericMetadata.writeStudyAreaEntry(context, 'dem_res_x', demResolutionX)
GenericMetadata.writeStudyAreaEntry(context, 'dem_res_y', demResolutionY)
GenericMetadata.writeStudyAreaEntry(context, 'dem_srs', t_srs)

# Get rows and columns for DEM
(columns, rows) = getDimensionsForRaster(demFilepath)
GenericMetadata.writeStudyAreaEntry(context, 'dem_columns', columns)
GenericMetadata.writeStudyAreaEntry(context, 'dem_rows', rows)

# Write provenance
asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
asset.name = 'dem'
asset.dcIdentifier = demFilename
asset.dcSource = demURL
asset.dcTitle = 'Digital Elevation Model'
asset.dcPublisher = 'GeoBrain'
asset.dcDescription = cmdline
asset.writeToMetadata(context)

# Write processing history
GenericMetadata.appendProcessingHistoryItem(context, cmdline)

# Clean-up
deleteGeoTiff(tmpDEMFilepath)
if args.outfile:
    outfile = args.outfile
else:
    outfile = "catchment"

# Get provenance data for gage
gageProvenanceObjects = [i for i in GenericMetadata.readAssetProvenanceObjects(context) if i.name == 'gage']
if not gageProvenanceObjects:
    sys.exit("Unable to load gage provenance information from metadata")
gageProvenance = gageProvenanceObjects[0]

# Get study area parameters
studyArea = GenericMetadata.readStudyAreaEntries(context)
reachcode = studyArea['nhd_gage_reachcode']
measure = studyArea['nhd_gage_measure_pct']

shapeFilename = "%s.shp" % (outfile)
shapeFilepath = os.path.join(context.projectDir, shapeFilename)
if not os.path.exists(shapeFilepath):
    getCatchmentShapefileForGage(context.config, context.projectDir, shapeFilename, reachcode, measure)

# Write provenance
asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
asset.name = 'study_area_shapefile'
asset.dcIdentifier = shapeFilename
asset.dcSource = gageProvenance.dcSource  # Take from gage
asset.dcTitle = 'Study area shapefile'
asset.dcPublisher = gageProvenance.dcPublisher
asset.dcDescription = cmdline
asset.writeToMetadata(context)

# Write processing history
GenericMetadata.appendProcessingHistoryItem(context, cmdline)
                                            format=OGR_SHAPEFILE_DRIVER_NAME)
    source = 'http://www.horizon-systems.com/NHDPlus/NHDPlusV2_home.php'
    writeMetadata = True
    sys.stdout.write('done\n')
else:
    sys.stdout.write('Getting catchment area draining through gage using NHDPlus webservice...')
    sys.stdout.flush()
    try:
        (shapeFilename, source) = getCatchmentFeaturesForStreamflowGage(context.config, context.projectDir,
                                                                        outfile, reachcode, measure,
                                                                        format=OGR_SHAPEFILE_DRIVER_NAME)
        writeMetadata = True
    except Exception as e:
        sys.exit(str(e))
    sys.stdout.write('done\n')

if writeMetadata:
    # Write provenance
    asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
    asset.name = 'study_area_shapefile'
    asset.dcIdentifier = shapeFilename
    asset.dcSource = source
    asset.dcTitle = 'Study area shapefile'
    asset.dcPublisher = 'USGS'
    asset.dcDescription = cmdline
    asset.writeToMetadata(context)

    # Write processing history
    GenericMetadata.appendProcessingHistoryItem(context, cmdline)
sys.stdout.flush()
copyRasterToGeoTIFF(context.config, context.projectDir, inRasterPath, rasterFilename)
sys.stdout.write('done\n')

# Make sure extent of resampled raster is the same as the extent of the DEM
newRasterMetadata = getDimensionsForRaster(rasterFilepath)
if (not force) and ((newRasterMetadata[0] != demColumns) or (newRasterMetadata[1] != demRows)):
    # Extents do not match, roll back and bail out
    os.unlink(rasterFilepath)
    sys.exit(textwrap.fill("ERROR: Extent of raster dataset %s does not match extent of DEM in project directory %s. Use --force to override." %
                           (rasterFilename, context.projectDir)))

# Write metadata
if GenericMetadata.RASTER_TYPE_LC == args.type:
    GenericMetadata.writeStudyAreaEntry(context, "landcover_type", "custom")

# Write provenance
asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
asset.name = args.type
asset.dcIdentifier = rasterFilename
asset.dcSource = "file://%s" % (inRasterPath,)
asset.dcTitle = args.type
asset.dcPublisher = publisher
asset.dcDescription = cmdline
asset.processingNotes = processingNotes
asset.writeToMetadata(context)

# Write processing history
GenericMetadata.appendProcessingHistoryItem(context, cmdline)
gage_lon = coords[0][0]
gage_lat = coords[0][1]
coordinates = (gage_lon, gage_lat)

# Ensure gage coordinates are within bounding box
if not isCoordinatePairInBoundingBox(bbox, coordinates):
    sys.exit("Gage coordinates %s, %s do not appear to lie within bounding box %s, %s, %s, %s" %
             (str(gage_lon), str(gage_lat),
              str(bbox['minX']), str(bbox['minY']), str(bbox['maxX']), str(bbox['maxY'])))

shpFilename = writeCoordinatePairsToPointShapefile(context.projectDir, outfile,
                                                   "gage_id", gageIDs, [coordinates])
sys.stdout.write('done\n')

# Write metadata
GenericMetadata.writeStudyAreaEntry(context, 'gage_id_attr', 'gage_id')
GenericMetadata.writeStudyAreaEntry(context, 'gage_id', args.idValue)
GenericMetadata.writeStudyAreaEntry(context, 'gage_lat_wgs84', gage_lat)
GenericMetadata.writeStudyAreaEntry(context, 'gage_lon_wgs84', gage_lon)

# Write provenance
asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
asset.name = 'gage'
asset.dcIdentifier = shpFilename
asset.dcSource = "file://%s" % (inGagePath,)
asset.dcTitle = 'Streamflow gage'
asset.dcPublisher = publisher
asset.dcDescription = cmdline
asset.writeToMetadata(context)

# Write processing history
GenericMetadata.appendProcessingHistoryItem(context, cmdline)
    os.unlink(rasterFilepath)
    sys.exit(textwrap.fill("ERROR: Raster type %s must be the same extent as DEM" %
                           (GenericMetadata.RASTER_TYPE_STREAM_BURNED_DEM,)))
if not force:
    # Extents do not match, roll back and bail out
    os.unlink(rasterFilepath)
    sys.exit(textwrap.fill("ERROR: Extent of raster dataset %s does not match extent of DEM in project directory %s. Use --force to override." %
                           (rasterFilename, context.projectDir)))

# Write metadata
if GenericMetadata.RASTER_TYPE_LC == args.type:
    GenericMetadata.writeStudyAreaEntry(context, "landcover_type", "custom")

# Write provenance
asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
asset.name = args.type
asset.dcIdentifier = rasterFilename
asset.dcSource = "file://%s" % (inRasterPath,)
asset.dcTitle = args.type
asset.dcPublisher = publisher
asset.dcDescription = cmdline
asset.processingNotes = processingNotes
asset.writeToMetadata(context)

# Write processing history
GenericMetadata.appendProcessingHistoryItem(context, cmdline)
measure = response['measure']
gage_lat = response['gage_lat']
gage_lon = response['gage_lon']
sys.stdout.write('done\n')

# Write gage coordinates to a shapefile in the project directory
shpFilename = writeCoordinatePairsToPointShapefile(context.projectDir, "gage",
                                                   "gage_id", [args.gageid], [(gage_lon, gage_lat)])

# Write study area metadata
GenericMetadata.writeStudyAreaEntry(context, 'gage_id_attr', 'gage_id')
GenericMetadata.writeStudyAreaEntry(context, 'gage_id', args.gageid)
GenericMetadata.writeStudyAreaEntry(context, 'nhd_gage_reachcode', reachcode)
GenericMetadata.writeStudyAreaEntry(context, 'nhd_gage_measure_pct', measure)
GenericMetadata.writeStudyAreaEntry(context, 'gage_lat_wgs84', gage_lat)
GenericMetadata.writeStudyAreaEntry(context, 'gage_lon_wgs84', gage_lon)

# Write provenance
asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
asset.name = 'gage'
asset.dcIdentifier = shpFilename
asset.dcSource = source
asset.dcTitle = 'Streamflow gage'
asset.dcPublisher = 'USGS'
asset.dcDescription = cmdline
asset.writeToMetadata(context)

# Write processing history
GenericMetadata.appendProcessingHistoryItem(context, cmdline)
# Get spatial metadata for DEM
demSpatialMetadata = getSpatialReferenceForRaster(demFilepath)
GenericMetadata.writeStudyAreaEntry(context, "dem_res_x", demSpatialMetadata[0])
GenericMetadata.writeStudyAreaEntry(context, "dem_res_y", demSpatialMetadata[1])
GenericMetadata.writeStudyAreaEntry(context, "dem_srs", demSpatialMetadata[5])

# Get rows and columns for DEM
demFilepath = os.path.join(context.projectDir, demFilename)
(columns, rows) = getDimensionsForRaster(demFilepath)
GenericMetadata.writeStudyAreaEntry(context, "dem_columns", columns)
GenericMetadata.writeStudyAreaEntry(context, "dem_rows", rows)

# Write provenance
inputDEMURL = "file://%s" % (inDEMPath,)
asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
asset.name = 'dem'
asset.dcIdentifier = demFilename
asset.dcSource = inputDEMURL
asset.dcTitle = 'Digital Elevation Model'
asset.dcPublisher = publisher
asset.dcDescription = cmdline
asset.writeToMetadata(context)

asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
asset.name = 'study_area_shapefile'
asset.dcIdentifier = shpFilename
asset.dcSource = inputDEMURL
asset.dcTitle = 'Study area shapefile'
asset.dcPublisher = publisher
asset.dcDescription = cmdline
def test_provenance_overwrite(self):
    """ Test case writing provenance metadata, with overwrite """
    asset = AssetProvenance()
    asset.section = GenericMetadata.MANIFEST_SECTION
    asset.name = "dem"
    asset.dcIdentifier = "dem.tif"
    asset.dcSource = "http://www.demexplorer.com/..."
    asset.dcTitle = "Study area DEM"
    asset.dcDate = datetime.strptime("201303", "%Y%m")
    asset.dcPublisher = "USGS"
    asset.dcDescription = "RegisterDEM.py ..."
    asset.writeToMetadata(self.context)

    assetProvenance = GenericMetadata.readAssetProvenanceObjects(self.context)[0]
    self.assertTrue(asset.section == assetProvenance.section)
    self.assertTrue(asset.name == assetProvenance.name)
    self.assertTrue(asset.dcIdentifier == assetProvenance.dcIdentifier)
    self.assertTrue(asset.dcSource == assetProvenance.dcSource)
    self.assertTrue(asset.dcTitle == assetProvenance.dcTitle)
    self.assertTrue(asset.dcDate == assetProvenance.dcDate)
    self.assertTrue(asset.dcPublisher == assetProvenance.dcPublisher)
    self.assertTrue(asset.dcDescription == assetProvenance.dcDescription)

    asset.dcIdentifier = 'foo.img'
    asset.dcSource = "http://a.different.url/..."
    asset.dcTitle = "A different study area DEM"
    asset.dcDate = datetime.strptime("201304", "%Y%m")
    asset.dcPublisher = "NASA"
    asset.dcDescription = "GetDEMExplorerDEM.py ..."
    asset.writeToMetadata(self.context)

    assetProvenance = GenericMetadata.readAssetProvenanceObjects(self.context)[0]
    self.assertTrue(asset.section == assetProvenance.section)
    self.assertTrue(asset.name == assetProvenance.name)
    self.assertTrue(asset.dcIdentifier == assetProvenance.dcIdentifier)
    self.assertTrue(asset.dcSource == assetProvenance.dcSource)
    self.assertTrue(asset.dcTitle == assetProvenance.dcTitle)
    self.assertTrue(asset.dcDate == assetProvenance.dcDate)
    self.assertTrue(asset.dcPublisher == assetProvenance.dcPublisher)
    self.assertTrue(asset.dcDescription == assetProvenance.dcDescription)
    sys.exit(textwrap.fill('SSURGO features already exist in project directory. Use --overwrite option to overwrite.'))

# Get study area parameters
studyArea = GenericMetadata.readStudyAreaEntries(context)
bbox = bboxFromString(studyArea['bbox_wgs84'])
outputrasterresolutionX = studyArea['dem_res_x']
outputrasterresolutionY = studyArea['dem_res_y']
srs = studyArea['dem_srs']

sys.stdout.write('Downloading SSURGO features for study area from USDA Soil Data mart...\n')
sys.stdout.flush()
shpFilename = getMapunitFeaturesForBoundingBox(context.config, context.projectDir, bbox,
                                               tileBbox=args.tile, t_srs=srs,
                                               tileDivisor=args.tiledivisor,
                                               keepOriginals=args.keeporiginals,
                                               overwrite=args.overwrite,
                                               nprocesses=args.nprocesses)

# Write provenance
asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
asset.name = 'soil_features'
asset.dcIdentifier = shpFilename
asset.dcSource = featurequery.WFS_URL
asset.dcTitle = 'SSURGO soils data'
asset.dcPublisher = 'USDA'
asset.dcDescription = cmdline
asset.writeToMetadata(context)

# Write processing history
GenericMetadata.appendProcessingHistoryItem(context, cmdline)
             (args.configfile, 'PATH_OF_OGR2OGR', 'GDAL/OGR'))
if not context.config.has_option('HYDRO1k', 'PATH_OF_HYDRO1K_BAS'):
    sys.exit("Config file %s does not define option %s in section %s" %
             (args.configfile, 'PATH_OF_HYDRO1K_BAS', 'HYDRO1k'))
if not context.config.has_option('HYDRO1k', 'HYDRO1k_BAS_LAYER_NAME'):
    sys.exit("Config file %s does not define option %s in section %s" %
             (args.configfile, 'HYDRO1k_BAS_LAYER_NAME', 'HYDRO1k'))

if args.outfile:
    outfile = args.outfile
else:
    outfile = "catchment"

shapeFilename = "%s.shp" % (outfile)
shapeFilepath = os.path.join(context.projectDir, shapeFilename)
if not os.path.exists(shapeFilepath):
    getCatchmentShapefileForHYDRO1kBasins(context.config, context.projectDir, shapeFilename, args.basins)

# Write provenance
asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
asset.name = 'study_area_shapefile'
asset.dcIdentifier = shapeFilename
asset.dcSource = 'http://eros.usgs.gov/#/Find_Data/Products_and_Data_Available/gtopo30/hydro/namerica'
asset.dcTitle = 'Study area shapefile derived from HYDRO1k Basins'
asset.dcPublisher = 'USGS'
asset.dcDescription = cmdline
asset.writeToMetadata(context)

# Write processing history
GenericMetadata.appendProcessingHistoryItem(context, cmdline)
demFilename = manifest['dem']
demFilepath = os.path.join(context.projectDir, demFilename)
layerName = os.path.splitext(shpFilename)[0]

# Get study area parameters
studyArea = GenericMetadata.readStudyAreaEntries(context)
outputrasterresolutionX = studyArea['dem_res_x']
outputrasterresolutionY = studyArea['dem_res_y']

# Truncate attributes to 10 characters because shapefiles rely on ancient technology
sys.stdout.write('Generating soil property maps using SOLIM...')
sys.stdout.flush()
attrList = [elem[:10] for elem in ecohydrolib.ssurgo.attributequery.attributeListNumeric]
rasterFiles = inferSoilPropertiesForSSURGOAndTerrainData(config=context.config, outputDir=context.projectDir,
                                                         shpFilepath=shpFilepath, demFilepath=demFilepath,
                                                         featureAttrList=attrList)
sys.stdout.write('done\n')

# Write metadata entries
for attr in rasterFiles.keys():
    asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
    asset.name = "soil_raster_%s" % (attr,)
    asset.dcIdentifier = rasterFiles[attr]
    asset.dcSource = 'http://solim.geography.wisc.edu'
    asset.dcTitle = attr
    asset.dcPublisher = 'Department of Geography, UW-Madison'
    asset.dcDescription = cmdline
    asset.writeToMetadata(context)

# Write processing history
GenericMetadata.appendProcessingHistoryItem(context, cmdline)
    sys.stdout.write('Extracting tile from local NLCD data...')
    sys.stdout.flush()
    extractTileFromRasterByRasterExtent(context.config, context.projectDir, demFilepath,
                                        nlcdRaster, tileFilename)
    sys.stdout.write('done\n')
else:
    # Download NLCD from WCS
    sys.stdout.write("Downloading NLCD via WCS from %s..." % (HOST,))
    sys.stdout.flush()
    (returnCode, nlcdURL) = getNLCDForBoundingBox(context.config, context.projectDir, tileFilename,
                                                  bbox=bbox,
                                                  resx=outputrasterresolutionX,
                                                  resy=outputrasterresolutionY,
                                                  coverage='NLCD2006', srs=srs)
    assert(returnCode)
    sys.stdout.write('done\n')

# Write metadata
GenericMetadata.writeStudyAreaEntry(context, "landcover_type", "NLCD2006")

# Write provenance
asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
asset.name = 'landcover'
asset.dcIdentifier = tileFilename
asset.dcSource = nlcdURL
asset.dcTitle = 'The National Landcover Database 2006'
asset.dcPublisher = 'USGS'
asset.dcDescription = cmdline
asset.writeToMetadata(context)

# Write processing history
GenericMetadata.appendProcessingHistoryItem(context, cmdline)
if result:
    gage_lat = result[1]
    gage_lon = result[0]
else:
    gage_lat = gage_lon = "Gage not found"

# Write gage coordinates to a shapefile in the project directory
shpFilename = writeCoordinatePairsToPointShapefile(context.projectDir, "gage",
                                                   "gage_id", [args.gageid], [(gage_lon, gage_lat)])

# Write study area metadata
GenericMetadata.writeStudyAreaEntry(context, 'gage_id_attr', 'gage_id')
GenericMetadata.writeStudyAreaEntry(context, 'gage_id', args.gageid)
GenericMetadata.writeStudyAreaEntry(context, 'nhd_gage_reachcode', reachcode)
GenericMetadata.writeStudyAreaEntry(context, 'nhd_gage_measure_pct', measure)
GenericMetadata.writeStudyAreaEntry(context, 'gage_lat_wgs84', gage_lat)
GenericMetadata.writeStudyAreaEntry(context, 'gage_lon_wgs84', gage_lon)

# Write provenance
asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
asset.name = 'gage'
asset.dcIdentifier = shpFilename
asset.dcSource = 'http://www.horizon-systems.com/NHDPlus/NHDPlusV2_home.php'
asset.dcTitle = 'Streamflow gage'
asset.dcPublisher = 'USGS'
asset.dcDescription = cmdline
asset.writeToMetadata(context)

# Write processing history
GenericMetadata.appendProcessingHistoryItem(context, cmdline)
        sys.stdout.flush()
        shpFilepath = os.path.join(context.projectDir, manifest['soil_features'])
        deleteShapefile(shpFilepath)
    else:
        sys.exit(textwrap.fill('SSURGO features already exist in project directory. Use --overwrite option to overwrite.'))

# Get study area parameters
studyArea = GenericMetadata.readStudyAreaEntries(context)
bbox = bboxFromString(studyArea['bbox_wgs84'])
outputrasterresolutionX = studyArea['dem_res_x']
outputrasterresolutionY = studyArea['dem_res_y']
srs = studyArea['dem_srs']

sys.stdout.write('Downloading SSURGO features for study area from USDA Soil Data mart...\n')
sys.stdout.flush()
shpFilename = getMapunitFeaturesForBoundingBox(context.config, context.projectDir, bbox, t_srs=srs)[0]

# Write provenance
asset = AssetProvenance(GenericMetadata.MANIFEST_SECTION)
asset.name = 'soil_features'
asset.dcIdentifier = shpFilename
asset.dcSource = featurequery.WFS_URL
asset.dcTitle = 'SSURGO soils data'
asset.dcPublisher = 'USDA'
asset.dcDescription = cmdline
asset.writeToMetadata(context)

# Write processing history
GenericMetadata.appendProcessingHistoryItem(context, cmdline)