def pyramidsAndStatistics(workspace):
    try:
        arcpy.BuildPyramidsandStatistics_management(workspace,
                                                    "INCLUDE_SUBDIRECTORIES",
                                                    "BUILD_PYRAMIDS",
                                                    "CALCULATE_STATISTICS")
    except arcpy.ExecuteError:
        print(arcpy.GetMessages(2))
    except Exception as ex:
        print(ex.args[0])
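# A minimal driver for the helper above; this is a sketch only, and the
# import location and workspace path are illustrative assumptions, not taken
# from the original script.
if __name__ == '__main__':
    import arcpy  # assumes an ArcGIS installation with an available licence
    pyramidsAndStatistics(r"C:\data\rasters")  # hypothetical raster folder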
def execute(self, parameters, messages):
    """The source code of your tool."""
    arcpy.env.overwriteOutput = True
    arcpy.AddMessage("Batch convert MXE to ArcGIS Raster")

    for param in parameters:
        arcpy.AddMessage("Parameter: %s = %s" % (param.name, param.valueAsText))

    input_directory = parameters[0].valueAsText
    output_directory = parameters[1].valueAsText
    path_mxe = parameters[2].valueAsText

    if not os.path.exists(output_directory):
        os.makedirs(output_directory)

    # Call Maxent's density.Convert to translate MXE files into ASCII grids
    command = 'java -cp "' + os.path.join(path_mxe, "maxent.jar") + '" density.Convert ' \
              + str(input_directory) + " mxe " + output_directory + " asc"
    os.system(str(command))

    # Set environment settings
    arcpy.env.workspace = output_directory

    rasterlist = arcpy.ListRasters("*")
    arcpy.AddMessage("There are " + str(len(rasterlist)) + " rasters to process.")

    for raster in rasterlist:
        # Output names are truncated to five characters to satisfy the ESRI
        # GRID naming limit
        if not arcpy.Exists(os.path.join(output_directory, raster[0:5])):
            arcpy.AddMessage("Converting " + str(raster) + ".")
            arcpy.ASCIIToRaster_conversion(os.path.join(output_directory, raster),
                                           os.path.join(output_directory, raster[0:5]),
                                           "FLOAT")

    arcpy.BuildPyramidsandStatistics_management(in_workspace=output_directory,
                                                include_subdirectories="NONE",
                                                build_pyramids="BUILD_PYRAMIDS",
                                                calculate_statistics="CALCULATE_STATISTICS",
                                                BUILD_ON_SOURCE="NONE",
                                                block_field="",
                                                estimate_statistics="NONE",
                                                x_skip_factor="1",
                                                y_skip_factor="1",
                                                ignore_values="",
                                                pyramid_level="-1",
                                                SKIP_FIRST="NONE",
                                                resample_technique="NEAREST",
                                                compression_type="DEFAULT",
                                                compression_quality="75",
                                                skip_existing="SKIP_EXISTING")
    return
#Inpath = env.workspace
#outpath = "F:\\MissouriDo\\GeneralSearch\\EPA\\Data_Weather\\AugResults.gdb"
#Results = outpath + "\\" + "Outputs"

# Refresh gp object and permit overwriting
arcpy.env.overwriteOutput = True

# Remove problematic files
if os.path.exists("schema.ini"):
    os.remove("schema.ini")

# Check out the ArcGIS Spatial Analyst extension license
arcpy.CheckOutExtension("Spatial")

# Build the pyramids and statistics to make the rasters easier to visualize
arcpy.BuildPyramidsandStatistics_management(arcpy.env.workspace)

print "#######################################################################################################################"
print "|The Spatial & Temporal Wetland Water Elevations and Volumes based on GREEN&T Infiltration & Evapotranspiration Models|"
print "#######################################################################################################################"
print ""

################################################################
# Local variables
p = Raster("p")
to = Raster("to")
to = 0.0  # note: overrides the raster loaded on the line above
t = Raster("to")
watermask3 = Raster("watermask3")
def bal_cal(veg_class, slope, aspect, fdi):
    """
    Calculate the bushfire attack level (BAL) from the classified vegetation
    and the slope and aspect, using the appropriate table in AS 3959 (2009).

    :param veg_class: `file` the input classified vegetation
    :param slope: `file` the input slope
    :param aspect: `file` the input aspect
    :param fdi: `int` the input FDI value
    """
    output_folder = os.path.dirname(veg_class)
    arcpy.env.overwriteOutput = True

    # set working directory
    work_folder = output_folder
    os.chdir(work_folder)
    arcpy.env.workspace = work_folder

    # get veg raster size, format, projection, etc.
    desc = arcpy.Describe(veg_class)
    extent = desc.extent
    lowleft_corner = arcpy.Point(extent.XMin, extent.YMin)
    pixel_w = desc.meanCellWidth
    pixel_h = desc.meanCellHeight
    sref = desc.spatialReference

    # load the rasters into numpy arrays
    veg_data = arcpy.RasterToNumPyArray(veg_class, nodata_to_value=-99)
    slope_data = arcpy.RasterToNumPyArray(slope, nodata_to_value=-99)
    aspect_data = arcpy.RasterToNumPyArray(aspect, nodata_to_value=-99)

    # calculate the BAL for each direction in numpy arrays, keeping a running
    # maximum of two directions at a time until the maximum over all
    # directions remains
    dire = ['w', 'e', 'n', 's', 'nw', 'ne', 'se', 'sw']

    for one_dir in dire:
        bal_list = []
        outdata = convo(one_dir, veg_data, slope_data, aspect_data, pixel_w, fdi)

        output_dir = 'bal_' + one_dir + '.img'
        if arcpy.Exists(output_dir):
            arcpy.Delete_management(output_dir)
        arcpy.NumPyArrayToRaster(outdata, lowleft_corner, pixel_w, pixel_h,
                                 value_to_nodata=-99).save(output_dir)
        arcpy.DefineProjection_management(output_dir, sref)

        if one_dir == 'w':
            bigger = outdata
            del outdata
            continue

        bal_list.append(bigger)
        bal_list.append(outdata)
        bigger = get_max_bal(bal_list)
        del outdata

    # save the maximum BAL over all directions
    arcpy.NumPyArrayToRaster(bigger, lowleft_corner, pixel_w, pixel_h,
                             value_to_nodata=-99).save('bal_max.img')
    arcpy.DefineProjection_management('bal_max.img', sref)

    arcpy.BuildPyramidsandStatistics_management(output_folder, "#",
                                                "BUILD_PYRAMIDS",
                                                "CALCULATE_STATISTICS")

    # delete intermediate results
    if arcpy.Exists(veg_class):
        arcpy.Delete_management(veg_class)
    if arcpy.Exists(slope):
        arcpy.Delete_management(slope)
    if arcpy.Exists(aspect):
        arcpy.Delete_management(aspect)

    del veg_data, slope_data, aspect_data
    del bal_list, bigger
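# A hypothetical invocation of bal_cal, shown as a sketch only; the raster
# names and the FDI value below are illustrative assumptions, not taken from
# the original script (note the function deletes its input rasters when done).
bal_cal('veg_class.img', 'slope.img', 'aspect.img', 100)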
sys.exit()

# Process: Create Mosaic Dataset
arcpy.CreateMosaicDataset_management(m_location, mosaic_name, coords, "4",
                                     "16_BIT_UNSIGNED", "NONE", "")
print "Mosaic dataset has been created"

# Process: Create Statistics
# BuildPyramidsandStatistics operates on a whole workspace, so a single call
# covers every raster in the folder; there is no need to call it once per raster.
rasters = arcpy.ListRasters(Input_Data_Filter)
arcpy.BuildPyramidsandStatistics_management(
    Input_Rasters_Data_Folder, "INCLUDE_SUBDIRECTORIES", "BUILD_PYRAMIDS",
    "CALCULATE_STATISTICS", "NONE", "", "NONE", "1", "1", "", "-1", "NONE",
    "NEAREST", "DEFAULT", "75", "OVERWRITE")
print "Statistics have been calculated for " + str(len(rasters)) + " rasters"

# Process: Add Rasters To Mosaic Dataset
arcpy.AddRastersToMosaicDataset_management(
    m_location + "\\" + mosaic_name, "Raster Dataset",
    Input_Rasters_Data_Folder, "UPDATE_CELL_SIZES", "UPDATE_BOUNDARY",
    "NO_OVERVIEWS", "", "0", "1500", "", Input_Data_Filter, "NO_SUBFOLDERS",
    "ALLOW_DUPLICATES", "BUILD_PYRAMIDS", "CALCULATE_STATISTICS",
    "NO_THUMBNAILS", "", "NO_FORCE_SPATIAL_REFERENCE", "ESTIMATE_STATISTICS")
print "The rasters have been added"

m = m_location + "\\" + mosaic_name
lidarlist = arcpy.ListRasters()

arcpy.AddMessage("Setting null values...")
for file in lidarlist:
    # Null out cells outside the valid elevation range; the second SetNull
    # must run on the output of the first, or the low-end mask is lost
    outSetNull = arcpy.sa.SetNull(file, file, "VALUE < -1355")
    outSetNull = arcpy.sa.SetNull(outSetNull, outSetNull, "VALUE > 29100")
    outsplit = file.split(".")
    outfilename = "stnull_" + outsplit[0] + ".tif"
    outfilepath = in_workspace + "/" + outfilename
    outSetNull.save(outfilepath)

arcpy.AddMessage("Building pyramids and calculating statistics...")
arcpy.BuildPyramidsandStatistics_management(in_workspace, "NONE",
                                            "BUILD_PYRAMIDS",
                                            "CALCULATE_STATISTICS")

# creates a list of rasters in the folder with the stnull_ prefix
lidarstnull = arcpy.ListRasters("stnull_*")
outraster = outrastername + ".tif"

arcpy.AddMessage("Mosaicking images...")
# chooses a raster to check for cell size and pixel type
testraster = in_workspace + "/" + lidarstnull[0]

# reads the pixel size from the test raster and stores it as text for the
# Mosaic To New Raster tool
xcellsize = str(arcpy.GetRasterProperties_management(testraster, "CELLSIZEX"))
def function(outputFolder, DEM, studyAreaMask, streamInput, minAccThresh,
             majAccThresh, smoothDropBuffer, smoothDrop, streamDrop, reconDEM,
             rerun=False):
    try:
        # Set environment variables
        arcpy.env.compression = "None"
        arcpy.env.snapRaster = DEM
        arcpy.env.extent = DEM
        arcpy.env.cellSize = arcpy.Describe(DEM).meanCellWidth

        ########################
        ### Define filenames ###
        ########################

        files = common.getFilenames('preprocess', outputFolder)

        rawDEM = files.rawDEM
        hydDEM = files.hydDEM
        hydFDR = files.hydFDR
        hydFDRDegrees = files.hydFDRDegrees
        hydFAC = files.hydFAC
        streamInvRas = files.streamInvRas  # Inverse stream raster - 0 for stream, 1 for no stream

        streams = files.streams
        streamDisplay = files.streamDisplay
        multRaster = files.multRaster
        hydFACInt = files.hydFACInt

        slopeRawDeg = files.slopeRawDeg
        slopeRawPer = files.slopeRawPer
        slopeHydDeg = files.slopeHydDeg
        slopeHydPer = files.slopeHydPer

        ###############################
        ### Set temporary variables ###
        ###############################

        prefix = os.path.join(arcpy.env.scratchGDB, "base_")

        cellSizeDEM = float(arcpy.env.cellSize)

        burnedDEM = prefix + "burnedDEM"
        streamAccHaFile = prefix + "streamAccHa"
        rawFDR = prefix + "rawFDR"
        allPolygonSinks = prefix + "allPolygonSinks"
        DEMTemp = prefix + "DEMTemp"
        hydFACTemp = prefix + "hydFACTemp"

        # Saved as .tif as it would not save as an ESRI grid on the server
        streamsRasterFile = os.path.join(arcpy.env.scratchFolder,
                                         "base_") + "StreamsRaster.tif"

        ###############################
        ### Save DEM to base folder ###
        ###############################

        codeBlock = 'Save DEM'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            # Save DEM to base folder as raw DEM with no compression
            pixelType = int(arcpy.GetRasterProperties_management(DEM, "VALUETYPE").getOutput(0))

            if pixelType == 9:  # 32 bit float
                arcpy.CopyRaster_management(DEM, rawDEM, pixel_type="32_BIT_FLOAT")
            else:
                log.info("Converting DEM to 32 bit floating type")
                arcpy.CopyRaster_management(DEM, DEMTemp)
                arcpy.CopyRaster_management(Float(DEMTemp), rawDEM, pixel_type="32_BIT_FLOAT")

                # Delete temporary DEM
                arcpy.Delete_management(DEMTemp)

            # Calculate statistics for raw DEM
            arcpy.CalculateStatistics_management(rawDEM)

            progress.logProgress(codeBlock, outputFolder)

        ################################
        ### Create multiplier raster ###
        ################################

        codeBlock = 'Create multiplier raster'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
            Reclassify(rawDEM, "Value",
                       RemapRange([[-999999.9, 999999.9, 1]]),
                       "NODATA").save(multRaster)
            progress.logProgress(codeBlock, outputFolder)

        codeBlock = 'Calculate slope in percent'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
            intSlopeRawPer = Slope(rawDEM, "PERCENT_RISE")
            intSlopeRawPer.save(slopeRawPer)
            del intSlopeRawPer
            log.info('Slope calculated in percent')
            progress.logProgress(codeBlock, outputFolder)

        if reconDEM is True:

            #######################
            ### Burn in streams ###
            #######################

            codeBlock = 'Burn in streams'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
                # Recondition DEM (burning stream network in using AGREE method)
                log.info("Burning streams into DEM.")
                reconditionDEM.function(rawDEM, streamInput, smoothDropBuffer,
                                        smoothDrop, streamDrop, burnedDEM)
                log.info("Completed stream network burn in to DEM")
                progress.logProgress(codeBlock, outputFolder)

            ##################
            ### Fill sinks ###
            ##################

            codeBlock = 'Fill sinks'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
                Fill(burnedDEM).save(hydDEM)
                log.info("Sinks in DEM filled")
                progress.logProgress(codeBlock, outputFolder)

            ######################
            ### Flow direction ###
            ######################

            codeBlock = 'Flow direction'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
                FlowDirection(hydDEM, "NORMAL").save(hydFDR)
                log.info("Flow Direction calculated")
                progress.logProgress(codeBlock, outputFolder)

            #################################
            ### Flow direction in degrees ###
            #################################

            codeBlock = 'Flow direction in degrees'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
                # Save flow direction raster in degrees (for display purposes)
                degreeValues = RemapValue([[1, 90], [2, 135], [4, 180],
                                           [8, 225], [16, 270], [32, 315],
                                           [64, 0], [128, 45]])
                Reclassify(hydFDR, "Value", degreeValues, "NODATA").save(hydFDRDegrees)
                progress.logProgress(codeBlock, outputFolder)

            #########################
            ### Flow accumulation ###
            #########################

            codeBlock = 'Flow accumulation'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
                hydFACTemp = FlowAccumulation(hydFDR, "", "FLOAT")
                hydFACTemp.save(hydFAC)
                arcpy.sa.Int(Raster(hydFAC)).save(hydFACInt)  # integer version
                log.info("Flow Accumulation calculated")
                progress.logProgress(codeBlock, outputFolder)

            #######################
            ### Calculate slope ###
            #######################

            codeBlock = 'Calculate slope on burned DEM'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
                intSlopeHydDeg = Slope(hydDEM, "DEGREE")
                intSlopeHydDeg.save(slopeHydDeg)
                del intSlopeHydDeg

                intSlopeHydPer = Slope(hydDEM, "PERCENT_RISE")
                intSlopeHydPer.save(slopeHydPer)
                del intSlopeHydPer

                log.info('Slope calculated')
                progress.logProgress(codeBlock, outputFolder)

            ##########################
            ### Create stream file ###
            ##########################

            codeBlock = 'Create stream file'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

                # Create accumulation in hectares
                streamAccHaFileInt = hydFACTemp * cellSizeDEM * cellSizeDEM / 10000.0
                streamAccHaFileInt.save(streamAccHaFile)
                del streamAccHaFileInt

                # Check stream initiation threshold reached
                streamYes = float(arcpy.GetRasterProperties_management(
                    streamAccHaFile, "MAXIMUM").getOutput(0))

                if streamYes > float(minAccThresh):

                    reclassifyRanges = RemapRange(
                        [[-1000000, float(minAccThresh), 1],
                         [float(minAccThresh), 9999999999, 0]])

                    outLUCIstream = Reclassify(streamAccHaFile, "VALUE", reclassifyRanges)
                    outLUCIstream.save(streamInvRas)
                    del outLUCIstream
                    log.info("Stream raster for input to LUCI created")

                    # Create stream file for display
                    reclassifyRanges = RemapRange(
                        [[0, float(minAccThresh), "NODATA"],
                         [float(minAccThresh), float(majAccThresh), 1],
                         [float(majAccThresh), 99999999999999, 2]])

                    streamsRaster = Reclassify(streamAccHaFile, "Value",
                                               reclassifyRanges, "NODATA")
                    streamOrderRaster = StreamOrder(streamsRaster, hydFDR, "STRAHLER")
                    streamsRaster.save(streamsRasterFile)

                    # Create two streams feature classes - one for analysis
                    # and one for display
                    arcpy.sa.StreamToFeature(streamOrderRaster, hydFDR,
                                             streams, 'NO_SIMPLIFY')
                    arcpy.sa.StreamToFeature(streamOrderRaster, hydFDR,
                                             streamDisplay, 'SIMPLIFY')

                    # Rename grid_code column to 'Strahler'
                    for streamFC in [streams, streamDisplay]:
                        arcpy.AddField_management(streamFC, "Strahler", "LONG")
                        arcpy.CalculateField_management(streamFC, "Strahler",
                                                        "!GRID_CODE!", "PYTHON_9.3")
                        arcpy.DeleteField_management(streamFC, "GRID_CODE")

                    del streamsRaster
                    del streamOrderRaster

                    log.info("Stream files created")

                else:
                    warning = 'No streams initiated'
                    log.warning(warning)
                    common.logWarnings(outputFolder, warning)

                    # Create LUCIStream file from multiplier raster
                    # (i.e. all cells have a value of 1 = no stream)
                    arcpy.CopyRaster_management(multRaster, streamInvRas)

                progress.logProgress(codeBlock, outputFolder)

        codeBlock = 'Clip data, build pyramids and generate statistics'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
            try:
                # Generate pyramids and stats
                arcpy.BuildPyramidsandStatistics_management(outputFolder, "", "", "", "")
                log.info("Pyramids and Statistics calculated for all LUCI "
                         "topographical information rasters")
            except Exception:
                log.info("Warning - could not generate all raster statistics")

            progress.logProgress(codeBlock, outputFolder)

        # Reset snap raster
        arcpy.env.snapRaster = None

    except Exception:
        log.error("Error in preprocessing operations")
        raise
r_2010 = Raster(td + '/popdensity_2010_mol.tif')
r_2015 = Raster(td + '/popdensity_2015_mol.tif')

# area of a cell in km2, since the original rasters were in population
# density (# people / km2)
cell_km2 = math.pow(float(arcpy.env.cellSize) / 1000, 2)

# linear interpolation between census years, using output from model_equations.R
equations = {2005: "r_2005",
             2006: "0.8 * r_2005 + 0.2 * r_2010",
             2007: "0.6 * r_2005 + 0.4 * r_2010",
             2008: "0.4 * r_2005 + 0.6 * r_2010",
             2009: "0.2 * r_2005 + 0.8 * r_2010",
             2010: "r_2010",
             2011: "0.8 * r_2010 + 0.2 * r_2015",
             2012: "0.6 * r_2010 + 0.4 * r_2015",
             2013: "0.4 * r_2010 + 0.6 * r_2015",
             2014: "0.2 * r_2010 + 0.8 * r_2015",
             2015: "r_2015"}

# calculate number of people in a pixel (vs density) and extract sum per region
for yr in range(2005, 2016):
    r_out = '%s/popdensity_%d_mol.tif' % (dd, yr)
    d_out = '%s/rgn_popsum%d_inland25mi.dbf' % (dd, yr)
    arcpy.AddMessage('  %d interpolate and sum by region' % yr)
    # evaluate the year's interpolation expression against the rasters above
    r = eval('(%s) * %g' % (equations[yr], cell_km2))
    r.save(r_out)
    ZonalStatisticsAsTable(td + '/rgn_inland_25mi_mol.tif', 'VALUE', r, d_out,
                           'DATA', 'SUM')

# build pyramids
arcpy.AddMessage('build pyramids')
arcpy.BuildPyramidsandStatistics_management(dd, 'INCLUDE_SUBDIRECTORIES',
                                            'BUILD_PYRAMIDS',
                                            'CALCULATE_STATISTICS',
                                            skip_existing='SKIP_EXISTING')
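# For any single year the eval above reduces to plain raster map algebra;
# a sketch of the 2007 case (assuming an r_2005 raster loaded like r_2010
# and r_2015 above):
#   r = (0.6 * r_2005 + 0.4 * r_2010) * cell_km2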
def execute(self, parameters, messages):
    t_start = time.clock()
    arcpy.env.overwriteOutput = True

    for param in parameters:
        arcpy.AddMessage("Parameter: %s = %s" % (param.name, param.valueAsText))

    input_woa_netcdf = parameters[0].valueAsText
    variable_name = parameters[1].valueAsText
    depths = parameters[2].valueAsText
    interpolation_procedure = parameters[3].valueAsText
    interpolation_resolution = parameters[4].valueAsText
    extraction_extent = parameters[5].valueAsText
    output_directory = parameters[6].valueAsText
    coordinate_system = parameters[7].valueAsText
    createxyz = parameters[8].valueAsText
    cpu_cores_used = parameters[9].valueAsText

    if not os.path.exists(output_directory):
        os.makedirs(output_directory)
    if not os.path.exists(os.path.join(output_directory, "Projected")):
        os.makedirs(os.path.join(output_directory, "Projected"))
    if not os.path.exists(os.path.join(output_directory, "Geographic")):
        os.makedirs(os.path.join(output_directory, "Geographic"))

    arcpy.env.extent = extraction_extent

    arcpy.AddMessage("Extracting " + str(input_woa_netcdf) + ".")

    # Set environment variables and build other variables for processing
    arcpy.env.mask = ""
    arcpy.env.workspace = output_directory
    depth_range = load_depth_string(depths)

    arcpy.AddMessage("There are " + str(len(depth_range)) + " depths to process.")

    # Cap the worker count at the number of available cores
    if int(cpu_cores_used) > int(multiprocessing.cpu_count()):
        cpu_cores_used = multiprocessing.cpu_count() - 1

    arcpy.AddMessage("Will use " + str(cpu_cores_used) + " cores for processing")

    # Use pythonw.exe so worker processes run without console windows
    python_exe = os.path.join(sys.exec_prefix, 'pythonw.exe')
    multiprocessing.set_executable(python_exe)

    pool = multiprocessing.Pool(int(cpu_cores_used))
    func = partial(mpprocess_call, output_directory, variable_name,
                   input_woa_netcdf, interpolation_procedure,
                   interpolation_resolution, coordinate_system,
                   extraction_extent, createxyz)
    pool.map(func, depth_range)
    pool.close()
    pool.join()

    count = 0
    depth_range = load_depth_string(depths)

    # Copy each depth's outputs out of the temp directory
    for i in depth_range:
        try:
            output_geographic = os.path.join(output_directory, "temp", "Geographic",
                                             variable_name[0:4] + str(int(i)),
                                             variable_name[0:4] + str(int(i)))
            if arcpy.Exists(output_geographic) and not arcpy.Exists(
                    os.path.join(output_directory, "Geographic",
                                 variable_name[0:4] + str(int(i)))):
                shutil.copytree(output_geographic,
                                os.path.join(output_directory, "Geographic",
                                             variable_name[0:4].lower() + str(int(i))))
        except:
            arcpy.AddMessage("Issue copying, geographic for depth " + str(int(i)))

        try:
            output_projected = os.path.join(output_directory, "temp", "Projected",
                                            variable_name[0:4] + str(int(i)),
                                            variable_name[0:4] + str(int(i)))
            if arcpy.Exists(output_projected) and not arcpy.Exists(
                    os.path.join(output_directory, "Projected",
                                 variable_name[0:4] + str(int(i)))):
                shutil.copytree(output_projected,
                                os.path.join(output_directory, "Projected",
                                             str(variable_name[0:4].lower() + str(int(i)))))
        except:
            arcpy.AddMessage("Issue copying, projected for depth " + str(int(i)))

        try:
            # Copy the xy coordinates file once, from the first depth that has one
            if count == 0:
                if os.path.exists(os.path.join(output_directory, "temp", "Projected",
                                               variable_name[0:4] + str(int(i)),
                                               "xy_coords.yxz")):
                    shutil.copyfile(
                        os.path.join(output_directory, "temp", "Projected",
                                     variable_name[0:4] + str(int(i)), "xy_coords.yxz"),
                        os.path.join(output_directory, "Projected", "xy_coords.yxz"))
                    count = 1
        except:
            arcpy.AddMessage("Issue copying, xyz coordinates for depth " + str(int(i)))

    arcpy.AddMessage("Making pyramids and statistics for outputs")

    arcpy.BuildPyramidsandStatistics_management(
        in_workspace=os.path.join(output_directory, "Geographic"),
        include_subdirectories="NONE",
        build_pyramids="BUILD_PYRAMIDS",
        calculate_statistics="CALCULATE_STATISTICS",
        BUILD_ON_SOURCE="NONE", block_field="", estimate_statistics="NONE",
        x_skip_factor="1", y_skip_factor="1", ignore_values="",
        pyramid_level="-1", SKIP_FIRST="NONE", resample_technique="NEAREST",
        compression_type="DEFAULT", compression_quality="75",
        skip_existing="SKIP_EXISTING")

    if len(coordinate_system) > 1:
        arcpy.BuildPyramidsandStatistics_management(
            in_workspace=os.path.join(output_directory, "Projected"),
            include_subdirectories="NONE",
            build_pyramids="BUILD_PYRAMIDS",
            calculate_statistics="CALCULATE_STATISTICS",
            BUILD_ON_SOURCE="NONE", block_field="", estimate_statistics="NONE",
            x_skip_factor="1", y_skip_factor="1", ignore_values="",
            pyramid_level="-1", SKIP_FIRST="NONE", resample_technique="NEAREST",
            compression_type="DEFAULT", compression_quality="75",
            skip_existing="SKIP_EXISTING")

    arcpy.AddMessage("Script complete in %s hours." % str((time.clock() - t_start) / 3600))
    return
print("Starting script at: {}".format(start_t)) today = datetime.datetime.today() date = today.strftime('%Y%m%d') # Create a list of the rasters in the in-workspace print("Listing Geodatabases") gdbs = arcpy.ListWorkspaces("*", "FileGDB") # Loop through GeoDatabases, listing rasters within each to input in cell stats to create action area for gdb in gdbs: t2 = datetime.datetime.now() arcpy.env.workspace = gdb arcpy.env.scratchWorkspace = gdb desc = arcpy.Describe(gdb) # Nomenclature for Action Areas (AA). Adding the '_dev' indicates that the AA included developed and right-of-way. # '_Ag' for only agricultural footprints, etc. aa = "{}_{}".format(desc.basename, date) uses = arcpy.ListRasters("*") print("Running Cell Statistics for {} at {}".format( gdb, datetime.datetime.now())) out = CellStatistics(uses, "MINIMUM") out.save(aa) print("Building pyramids") arcpy.BuildPyramidsandStatistics_management(aa) print("Completed Action Area for {} in: {}".format( desc.basename, datetime.datetime.now() - t2)) print("Completed Processing of all Action Areas in: {}".format( datetime.datetime.now() - start_t))
def build_pyramids(input_items, compression_method, compression_quality,
                   resampling_method, show_progress=False):
    """Build raster pyramids."""
    processed = 0
    skipped = 0
    if show_progress:
        i = 1.
        count = len(input_items)
        status_writer.send_percent(0.0, _('Starting to process...'),
                                   'build_raster_pyramids')

    for result in input_items:
        dsc = arcpy.Describe(result)
        if not hasattr(dsc, 'datasetType'):
            status_writer.send_state(status.STAT_WARNING,
                                     _('{0} is not a raster dataset type.').format(result))
            skipped += 1
            skipped_reasons[result] = _('is not a raster dataset type.')
            if show_progress:
                i += 1
            continue

        if dsc.datasetType not in ('RasterDataset', 'MosaicDataset', 'RasterCatalog'):
            status_writer.send_state(status.STAT_WARNING,
                                     _('{0} is not a raster dataset type.').format(result))
            skipped += 1
            skipped_reasons[result] = _('is not a raster dataset type.')
            if show_progress:
                i += 1
        else:
            try:
                # Build pyramids
                if dsc.datasetType in ('RasterCatalog', 'MosaicDataset'):
                    status_writer.send_status(_('Building pyramids for: {0}').format(result))
                    arcpy.BuildPyramidsandStatistics_management(
                        result,
                        calculate_statistics='NONE',
                        resample_technique=resampling_options[resampling_method],
                        compression_type=compression_method,
                        compression_quality=compression_quality
                    )
                # ArcGIS 10.1 bug - pyramids are not built beyond the first
                # level for rasters in SDE.
                # See: https://geonet.esri.com/thread/71775
                else:
                    arcpy.BuildPyramids_management(
                        result,
                        resample_technique=resampling_options[resampling_method],
                        compression_type=compression_method,
                        compression_quality=compression_quality
                    )
                if show_progress:
                    status_writer.send_percent(i / count,
                                               _('Built Pyramids for: {0}').format(dsc.name),
                                               'build_raster_pyramids')
                    i += 1
                else:
                    status_writer.send_status(_('Built Pyramids for: {0}').format(dsc.name))
                processed += 1
            except arcpy.ExecuteError as ee:
                status_writer.send_state(status.STAT_WARNING,
                                         _('Failed to build pyramids for: {0}. {1}').format(result, ee))
                skipped_reasons[result] = ee.message
                skipped += 1
                if show_progress:
                    i += 1
                continue
    return processed, skipped
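# A standalone sketch of the same dispatch rule used above, without the status
# plumbing: container types (mosaic datasets, raster catalogs) go through the
# workspace-level tool, while plain raster datasets go through BuildPyramids.
# The function name and sample path are illustrative assumptions.
import arcpy

def pyramids_for(dataset, resample="NEAREST"):
    dsc = arcpy.Describe(dataset)
    if getattr(dsc, 'datasetType', None) in ('RasterCatalog', 'MosaicDataset'):
        arcpy.BuildPyramidsandStatistics_management(
            dataset, calculate_statistics='NONE', resample_technique=resample)
    else:
        arcpy.BuildPyramids_management(dataset, resample_technique=resample)

# pyramids_for(r"C:\data\imagery.gdb\mosaic")  # hypothetical input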
countyname, "#", "DEFINE_MISSING_TILES", "GENERATE_OVERVIEWS", "GENERATE_MISSING_IMAGES", "REGENERATE_STALE_IMAGES") # Replace a layer/table view name with a path to a dataset (which can be a layer file) or create the layer/table view within the script # The following inputs are layers or table views: countyname print "Calculating statistics" arcpy.CalculateStatistics_management( basedir + countyname_with_spaces + "\\" + countyname + ".gdb\\" + countyname, "1", "1", "#", "OVERWRITE", "Feature Set") # Replace a layer/table view name with a path to a dataset (which can be a layer file) or create the layer/table view within the script # The following inputs are layers or table views: countyname print "Calc stats 1" arcpy.BuildPyramidsandStatistics_management( basedir + countyname_with_spaces + "\\" + countyname + ".gdb\\" + countyname, "INCLUDE_SUBDIRECTORIES", "NONE", "CALCULATE_STATISTICS", "NONE", "#", "NONE", "1", "1", "#", "-1", "NONE", "NEAREST", "DEFAULT", "75", "SKIP_EXISTING") # Replace a layer/table view name with a path to a dataset (which can be a layer file) or create the layer/table view within the script # The following inputs are layers or table views: countyname print "Building pyramids" arcpy.BuildPyramidsandStatistics_management( basedir + countyname_with_spaces + "\\" + countyname + ".gdb\\" + countyname, "INCLUDE_SUBDIRECTORIES", "BUILD_PYRAMIDS", "NONE", "BUILD_ON_SOURCE", "#", "NONE", "1", "1", "#", "-1", "NONE", "NEAREST", "DEFAULT", "75", "SKIP_EXISTING") # Replace a layer/table view name with a path to a dataset (which can be a layer file) or create the layer/table view within the script # The following inputs are layers or table views: countyname print "Calc stats 2"
def execute(self, parameters, messages):
    t_start = time.clock()
    arcpy.env.overwriteOutput = True

    for param in parameters:
        arcpy.AddMessage("Parameter: %s = %s" % (param.name, param.valueAsText))

    input_woa_netcdf = parameters[0].valueAsText
    variable_name = parameters[1].valueAsText
    lat_name = parameters[2].valueAsText
    lon_name = parameters[3].valueAsText
    depth_name = parameters[4].valueAsText
    depths = parameters[5].valueAsText
    interpolation_procedure = parameters[6].valueAsText
    interpolation_resolution = parameters[7].valueAsText
    extraction_extent = parameters[8].valueAsText
    temporary_directory = parameters[9].valueAsText
    output_directory = parameters[10].valueAsText
    coordinate_system = parameters[11].valueAsText
    createxyz = parameters[12].valueAsText

    if not os.path.exists(output_directory):
        os.makedirs(output_directory)
    if not os.path.exists(os.path.join(output_directory, "Projected")):
        os.makedirs(os.path.join(output_directory, "Projected"))
    if not os.path.exists(os.path.join(output_directory, "Geographic")):
        os.makedirs(os.path.join(output_directory, "Geographic"))
    if not os.path.exists(temporary_directory):
        os.makedirs(temporary_directory)

    arcpy.env.extent = extraction_extent

    arcpy.AddMessage("Extracting " + str(input_woa_netcdf) + ".")

    # Set environment variables and build other variables for processing
    arcpy.env.mask = ""
    arcpy.env.workspace = temporary_directory
    depth_range = load_depth_string(depths)

    # The process goes: 1) convert the depth layer to a point file,
    # 2) interpolate to the selected resolution using the selected
    # interpolation procedure, 3) save that layer back out under the variable
    # name plus the depth value associated with it. You end up with a
    # directory of n rasters (n = number of depth layers).

    # First give an indication of the magnitude of this analysis
    arcpy.AddMessage("There are " + str(len(depth_range)) + " depths to process.")

    count_geo = 0
    count_proj = 0

    for i in depth_range:
        arcpy.AddMessage("Working on " + str(int(i)))

        # Set some values that we will use to extract data from the NetCDF file
        out_temp_layer = os.path.join(output_directory, "out.shp")
        dimensionValues = str(depth_name) + " " + str(int(i))
        arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(4326)

        # 1 Extract layer to a temporary feature class
        arcpy.MakeNetCDFFeatureLayer_md(in_netCDF_file=input_woa_netcdf,
                                        variable=variable_name,
                                        x_variable=str(lon_name),
                                        y_variable=str(lat_name),
                                        out_feature_layer=out_temp_layer,
                                        row_dimension=str(lat_name) + ";" + str(lon_name),
                                        z_variable="", m_variable="",
                                        dimension_values=dimensionValues,
                                        value_selection_method="BY_VALUE")

        # 2 Interpolate to higher resolution and 3 save to output directory
        if interpolation_procedure == "IDW":
            arcpy.AddMessage("Interpolating " + str(int(i)) + " using IDW")
            arcpy.gp.Idw_sa(out_temp_layer, variable_name,
                            os.path.join(output_directory, "Geographic",
                                         variable_name[0:4] + str(int(i))),
                            interpolation_resolution, "2", "VARIABLE 10", "")
        elif interpolation_procedure == "Spline":
            arcpy.AddMessage("Interpolating " + str(int(i)) + " using Spline")
            arcpy.gp.Spline_sa(out_temp_layer, variable_name,
                               os.path.join(output_directory, "Geographic",
                                            variable_name[0:4] + str(int(i))),
                               interpolation_resolution, "TENSION", "0.1", "10")
            arcpy.Delete_management(os.path.join(output_directory, "out.shp"))
        elif interpolation_procedure == "Kriging":
            arcpy.AddMessage("Interpolating " + str(int(i)) + " using Ordinary Kriging")
            arcpy.gp.Kriging_sa(out_temp_layer, variable_name,
                                os.path.join(output_directory, "Geographic",
                                             variable_name[0:4] + str(int(i))),
                                "Spherical " + str(interpolation_resolution),
                                interpolation_resolution, "VARIABLE 10", "")
        elif interpolation_procedure == "Natural Neighbor":
            arcpy.AddMessage("Interpolating " + str(int(i)) + " using Natural Neighbor")
            arcpy.NaturalNeighbor_3d(out_temp_layer, variable_name,
                                     os.path.join(output_directory, "Geographic",
                                                  variable_name[0:4] + str(int(i))),
                                     interpolation_resolution)
        elif interpolation_procedure == "Natural Neighbor and IDW":
            arcpy.AddMessage("Interpolating " + str(int(i)) +
                             " using Natural Neighbor and IDW")
            if not os.path.exists(os.path.join(output_directory, "temp", "idw")):
                os.makedirs(os.path.join(output_directory, "temp", "idw"))
            if not os.path.exists(os.path.join(output_directory, "temp", "nat")):
                os.makedirs(os.path.join(output_directory, "temp", "nat"))
            arcpy.NaturalNeighbor_3d(out_temp_layer, variable_name,
                                     os.path.join(output_directory, "temp", "nat",
                                                  variable_name[0:4] + str(int(i))),
                                     interpolation_resolution)
            arcpy.gp.Idw_sa(out_temp_layer, variable_name,
                            os.path.join(output_directory, "temp", "idw",
                                         variable_name[0:4] + str(int(i))),
                            interpolation_resolution, "2", "VARIABLE 10", "")
            input_rasters = [os.path.join(output_directory, "temp", "nat",
                                          variable_name[0:4] + str(int(i))),
                             os.path.join(output_directory, "temp", "idw",
                                          variable_name[0:4] + str(int(i)))]
            # Natural Neighbor output takes priority ("FIRST"); IDW fills gaps
            arcpy.MosaicToNewRaster_management(input_rasters=input_rasters,
                                               output_location=os.path.join(output_directory, "Geographic"),
                                               raster_dataset_name_with_extension=str(i),
                                               coordinate_system_for_the_raster="",
                                               pixel_type="8_BIT_UNSIGNED",
                                               cellsize="",
                                               number_of_bands="1",
                                               mosaic_method="FIRST",
                                               mosaic_colormap_mode="FIRST")
        elif interpolation_procedure == "None":
            arcpy.AddMessage("Making a raster for " + str(int(i)))
            arcpy.MakeNetCDFRasterLayer_md(in_netCDF_file=input_woa_netcdf,
                                           variable=variable_name,
                                           x_dimension=lon_name,
                                           y_dimension=lat_name,
                                           out_raster_layer=variable_name[0:4] + str(int(i)),
                                           band_dimension="",
                                           dimension_values="",
                                           value_selection_method="BY_VALUE")
            arcpy.CopyRaster_management(variable_name[0:4] + str(int(i)),
                                        os.path.join(output_directory, "Geographic",
                                                     variable_name[0:4] + str(int(i))),
                                        "", "", "", "NONE", "NONE", "")

        if len(coordinate_system) > 1:
            arcpy.AddMessage("Reprojecting " + variable_name[0:4] + str(int(i)) + ".")
            arcpy.ProjectRaster_management(os.path.join(output_directory, "Geographic",
                                                        variable_name[0:4] + str(int(i))),
                                           os.path.join(output_directory, "Projected",
                                                        variable_name[0:4] + str(int(i))),
                                           coordinate_system, "NEAREST",
                                           "#", "#", "#", "#")

        arcpy.AddMessage("Generating master file for trilinear interpolation")

        if createxyz == "Only Geographic" or createxyz == "Both":
            if not os.path.exists(os.path.join(output_directory, "Geographic_yxz")):
                os.makedirs(os.path.join(output_directory, "Geographic_yxz"))
            raster_to_xyz(os.path.join(output_directory, "Geographic",
                                       variable_name[0:4] + str(int(i))),
                          variable_name[0:4] + str(int(i)),
                          os.path.join(output_directory, "Geographic_yxz"),
                          349000000.0)

            depth = int(filter(str.isdigit, str(i)))

            if count_geo == 0:
                # First depth: start the master table with x, y and this
                # depth's values
                df = pd.read_csv(os.path.join(output_directory, "Geographic_yxz",
                                              variable_name[0:4] + str(int(i)) + ".yxz"),
                                 header=0, names=["y", "x", "z"], sep=" ",
                                 dtype={"y": np.float64, "x": np.float64, "z": np.float64})
                master = df[["x", "y", "z"]].copy()
                master.columns = ["x", "y", int(depth)]
                master.to_pickle(os.path.join(output_directory, "Geographic_yxz", "master.pkl"))
                os.remove(os.path.join(output_directory, "Geographic_yxz",
                                       variable_name[0:4] + str(int(i)) + ".yxz"))
                del df, master
                gc.collect()
                count_geo = 1
            elif count_geo == 1:
                # Subsequent depths: append this depth's values as a new column
                master = pd.read_pickle(os.path.join(output_directory,
                                                     "Geographic_yxz", "master.pkl"))
                df = pd.read_csv(os.path.join(output_directory, "Geographic_yxz",
                                              variable_name[0:4] + str(int(i)) + ".yxz"),
                                 header=0, names=["y", "x", "z"], sep=" ",
                                 dtype={"y": np.float64, "x": np.float64, "z": np.float64})
                master[int(depth)] = df["z"].copy()
                master.to_pickle(os.path.join(output_directory, "Geographic_yxz", "master.pkl"))
                os.remove(os.path.join(output_directory, "Geographic_yxz",
                                       variable_name[0:4] + str(int(i)) + ".yxz"))
                del df, master
                gc.collect()

        if createxyz == "Only Projected" or createxyz == "Both":
            if not os.path.exists(os.path.join(output_directory, "Projected_yxz")):
                os.makedirs(os.path.join(output_directory, "Projected_yxz"))
            raster_to_xyz(os.path.join(output_directory, "Projected",
                                       variable_name[0:4] + str(int(i))),
                          variable_name[0:4] + str(int(i)),
                          os.path.join(output_directory, "Projected_yxz"),
                          349000000.0)

            depth = int(filter(str.isdigit, str(i)))

            if count_proj == 0:
                # First depth: save the xy coordinates and this depth's values
                df = pd.read_csv(os.path.join(output_directory, "Projected_yxz",
                                              variable_name[0:4] + str(int(i)) + ".yxz"),
                                 header=0, names=["y", "x", "z"], sep=" ",
                                 dtype={"y": np.float64, "x": np.float64, "z": np.float64})
                master = df[["x", "y"]].copy()
                master.columns = ["x", "y"]
                master = np.round(master, 4)
                master.to_pickle(os.path.join(output_directory, "Projected_yxz", "xy_coords.pkl"))
                master.to_pickle(os.path.join(output_directory, "Projected", "xy_coords.pkl"))
                del master
                gc.collect()
                master_z = df[["z"]].copy()
                master_z.columns = [int(depth)]
                master_z = np.round(master_z, 4)
                master_z.to_pickle(os.path.join(output_directory, "Projected_yxz",
                                                str(int(i)) + ".pkl"))
                os.remove(os.path.join(output_directory, "Projected_yxz",
                                       variable_name[0:4] + str(int(i)) + ".yxz"))
                del df, master_z
                gc.collect()
                count_proj = 1
            elif count_proj == 1:
                # Subsequent depths: save this depth's values only
                df = pd.read_csv(os.path.join(output_directory, "Projected_yxz",
                                              variable_name[0:4] + str(int(i)) + ".yxz"),
                                 header=0, names=["y", "x", "z"], sep=" ",
                                 dtype={"y": np.float64, "x": np.float64, "z": np.float64})
                master_z = df[["z"]].copy()
                master_z.columns = [int(depth)]
                master_z = np.round(master_z, 4)
                master_z.to_pickle(os.path.join(output_directory, "Projected_yxz",
                                                str(int(i)) + ".pkl"))
                os.remove(os.path.join(output_directory, "Projected_yxz",
                                       variable_name[0:4] + str(int(i)) + ".yxz"))
                del df, master_z
                gc.collect()

    arcpy.AddMessage("Making pyramids and statistics for outputs")

    arcpy.BuildPyramidsandStatistics_management(
        in_workspace=os.path.join(output_directory, "Geographic"),
        include_subdirectories="NONE",
        build_pyramids="BUILD_PYRAMIDS",
        calculate_statistics="CALCULATE_STATISTICS",
        BUILD_ON_SOURCE="NONE", block_field="", estimate_statistics="NONE",
        x_skip_factor="1", y_skip_factor="1", ignore_values="",
        pyramid_level="-1", SKIP_FIRST="NONE", resample_technique="NEAREST",
        compression_type="DEFAULT", compression_quality="75",
        skip_existing="SKIP_EXISTING")

    if len(coordinate_system) > 1:
        arcpy.BuildPyramidsandStatistics_management(
            in_workspace=os.path.join(output_directory, "Projected"),
            include_subdirectories="NONE",
            build_pyramids="BUILD_PYRAMIDS",
            calculate_statistics="CALCULATE_STATISTICS",
            BUILD_ON_SOURCE="NONE", block_field="", estimate_statistics="NONE",
            x_skip_factor="1", y_skip_factor="1", ignore_values="",
            pyramid_level="-1", SKIP_FIRST="NONE", resample_technique="NEAREST",
            compression_type="DEFAULT", compression_quality="75",
            skip_existing="SKIP_EXISTING")

    arcpy.AddMessage("Script complete in %s seconds." % (time.clock() - t_start))
    return
"""-------- RUN SELECTED PROCESSES ------------------------------------------""" if processesToRun == 'All': processesToRun = ['Freq', 'LCBinary', 'LCSum','School', 'GreenP', 'ImpP', 'Parks', 'NrRd', 'WVW', 'WVT', 'RB', 'GUIDOS_Prep', 'GUIDOS', 'GSTCnWR', 'IntDen', 'Floodplains', 'NrRdRsch', 'Metadata'] """ Make Sure the Processes Listed are Real """ for process in processesToRun: if process not in ['Freq', 'LCBinary', 'LCSum','School', 'GreenP', 'ImpP', 'Parks', 'NrRd', 'WVW', 'WVT', 'RB', 'GUIDOS_Prep', 'GUIDOS', 'GSTCnWR', 'IntDen', 'Floodplains', 'NrRdRsch', 'Metadata']: print 'One of the processes you listed is not acutally a process. Please correct and rerun.' exit() if 'Freq' in processesToRun: import Frequent_V2 Frequent_V2.freq(city, inDir, workFld) arcpy.BuildPyramidsandStatistics_management(workFld + '/' + city + '_Freq.gdb') if 'GUIDOS_Prep' in processesToRun: import GUIDOS_Prep GUIDOS_Prep.Guidos_Prep(city, inDir, workFld) arcpy.BuildPyramidsandStatistics_management(workFld + '/' + city + '_Split') if 'LCBinary' in processesToRun: import LCBinary LCBinary.LCBin(city, inDir, workFld) arcpy.BuildPyramidsandStatistics_management(workFld + '/' + city + '_Freq.gdb') if 'LCSum' in processesToRun: import LCSum LCSum.LCSum(city, inDir, workFld)