def StudyAreaWatershed(output_workspace, dem_hydro, processes):
    """Derive D8 flow direction and flow accumulation rasters for a study area.

    Fills sinks in the hydro-conditioned DEM, computes D8 flow direction and
    D8 flow accumulation, saves both rasters (with statistics and pyramids)
    into ``output_workspace``, and returns the flow accumulation path through
    script-tool output parameter index 3.

    :param output_workspace: workspace that receives the output rasters and
        is also used as the scratch workspace
    :param dem_hydro: hydro-conditioned DEM; also sets the environment cell size
    :param processes: value assigned to arcpy.env.parallelProcessingFactor
    """
    # Check out the ArcGIS Spatial Analyst extension license
    arcpy.CheckOutExtension("Spatial")

    # Set environment variables
    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = output_workspace
    arcpy.env.scratchWorkspace = output_workspace
    arcpy.env.cellSize = dem_hydro
    arcpy.env.parallelProcessingFactor = processes

    # List parameter values
    arcpy.AddMessage("Workspace: {}".format(arcpy.env.workspace))
    arcpy.AddMessage("DEM hydro: "
                     "{}".format(arcpy.Describe(dem_hydro).baseName))
    arcpy.AddMessage("processes: {}".format(processes))

    # Fill sinks
    arcpy.AddMessage("Beginning filling sinks...")
    dem_fill = arcpy.sa.Fill(in_surface_raster=dem_hydro)
    arcpy.AddMessage("Fill sinks complete.")

    # Calculate D8 flow direction (suitable for input into the Watershed tool)
    arcpy.AddMessage("Beginning flow direction...")
    flow_dir_d8 = arcpy.sa.FlowDirection(in_surface_raster=dem_fill,
                                         flow_direction_type="D8")
    flow_direction_d8 = os.path.join(output_workspace, "flow_direction_d8")
    flow_dir_d8.save(flow_direction_d8)
    _calc_stats_and_pyramids(flow_direction_d8)
    arcpy.AddMessage("Flow direction complete.")

    # Calculate flow accumulation
    arcpy.AddMessage("Beginning flow accumulation...")
    flow_accum_d8 = arcpy.sa.FlowAccumulation(flow_dir_d8,
                                              data_type="FLOAT",
                                              flow_direction_type="D8")
    flow_accumulation_d8 = os.path.join(output_workspace,
                                        "flow_accumulation_d8")
    flow_accum_d8.save(flow_accumulation_d8)
    _calc_stats_and_pyramids(flow_accumulation_d8)
    arcpy.AddMessage("Flow accumulation complete.")

    # Return the flow accumulation path as the tool's derived output.
    arcpy.SetParameter(3, flow_accumulation_d8)


def _calc_stats_and_pyramids(raster_path):
    # Calculate raster statistics and build pyramids for one saved raster.
    # (Fixes the "pyraminds" typo in the original progress messages.)
    arcpy.AddMessage(" Calculating statistics...")
    arcpy.CalculateStatistics_management(raster_path)
    arcpy.AddMessage(" Building pyramids...")
    arcpy.BuildPyramids_management(raster_path)
def multi_convert_to_raster(polygon_fc_list, output_workspace):
    """Rasterize each polygon feature class on its lake/zone ID field.

    Uses a ``*lagoslakeid`` field as the zone field when present, otherwise
    falls back to ``*zoneid``.  When lagoslakeid is used, a matching LONG
    ``lagoslakeid`` attribute is added to the output raster's VAT.

    :param polygon_fc_list: iterable of polygon feature class paths
    :param output_workspace: workspace receiving the ``<name>_raster`` outputs
    """
    for polygon_fc in polygon_fc_list:
        env.extent = polygon_fc
        env.snapRaster = SNAP_RASTER
        short_name = os.path.splitext(os.path.basename(polygon_fc))[0]
        arcpy.AddMessage("Converting {}...".format(short_name))
        output_raster = os.path.join(output_workspace, short_name + '_raster')

        # BUG FIX: the original indexed ListFields(...)[0] unconditionally,
        # which raises IndexError when a feature class has no matching field
        # -- before the truth test meant to choose between the two fields.
        lagoslakeid_fields = arcpy.ListFields(polygon_fc, '*lagoslakeid')
        lagoslakeid_field = lagoslakeid_fields[0].name if lagoslakeid_fields else ''
        if lagoslakeid_field:
            zone_field = lagoslakeid_field
        else:
            zone_field = arcpy.ListFields(polygon_fc, '*zoneid')[0].name

        output_raster = arcpy.PolygonToRaster_conversion(
            polygon_fc, zone_field, output_raster, 'CELL_CENTER',
            cellsize=CELL_SIZE)
        arcpy.BuildPyramids_management(output_raster, SKIP_FIRST=True)
        if lagoslakeid_field:
            # Mirror the raster Value field into an explicit lagoslakeid column.
            arcpy.BuildRasterAttributeTable_management(output_raster, True)
            arcpy.AddField_management(output_raster, 'lagoslakeid', 'LONG')
            arcpy.CalculateField_management(output_raster, 'lagoslakeid',
                                            '!Value!', 'PYTHON_9.3')
        arcpy.AddMessage("Completed.")
def heatMapGen(radius, cellSize):
    """Build a kernel-density heat-map layer and export the map to JPEG.

    Computes KernelDensity over the OSM stops layer with the given search
    radius and cell size, symbolizes it from a template .lyr, adds it to the
    map document, saves the MXD and exports a radius-stamped JPEG.
    """
    print("Setting database and workspace..." + str(time.ctime()))
    arcpy.env.workspace = "D:/GD/WGiSR/_Konferencje/Plener 2018/heatMap/data.gdb"
    print(" Database and workspace set! " + str(time.ctime()))

    print("Setting local variables..." + str(time.ctime()))
    map_doc = arcpy.mapping.MapDocument(
        "D:/GD/WGiSR/_Konferencje/Plener 2018/heatMap/HeatMap.mxd")
    frame = arcpy.mapping.ListDataFrames(map_doc)[0]
    template = "D:/GD/WGiSR/_Konferencje/Plener 2018/heatMap/kd_temp_hypso.lyr"
    inData = "OSM_STOPS_2180"
    print(" Local variables set! " + str(time.ctime()))

    print(" Generating Heat Map..." + str(time.ctime()))
    # Drop any previous run's output before recomputing.
    arcpy.Delete_management("kd")
    arcpy.CheckOutExtension("Spatial")
    density = arcpy.sa.KernelDensity(inData, "NONE", cellSize, radius,
                                     "SQUARE_KILOMETERS")
    density.save("kd")
    arcpy.BuildPyramids_management("kd")
    arcpy.MakeRasterLayer_management("kd")
    heat_layer = arcpy.mapping.Layer("kd")
    symbology_source = arcpy.mapping.Layer(template)
    arcpy.ApplySymbologyFromLayer_management(heat_layer, symbology_source)
    arcpy.mapping.AddLayer(frame, heat_layer)
    map_doc.save()
    print(" Heat Map generated! " + str(time.ctime()))

    print(" Exporting map..." + str(time.ctime()))
    arcpy.mapping.ExportToJPEG(
        map_doc,
        "D:/GD/WGiSR/_Konferencje/Plener 2018/heatMap/_genMaps/HeatMap_radius_"
        + str(radius) + ".jpg")
    print(" Map exported! " + str(time.ctime()))
def build_stats(outfilename):
    """Calculate statistics and build pyramids for the final output raster.

    Best-effort: failures in either step are reported via gprint but never
    abort the caller.  Replaces the original bare ``except: pass``, which
    silently swallowed every exception, with narrowed per-step handling
    consistent with the other build_stats helper in this file.
    """
    gprint('BUILDING OUTPUT STATISTICS & PYRAMIDS' + '\n')
    try:
        arcpy.CalculateStatistics_management(outfilename, "1", "1", "#")
    except Exception:
        gprint('Statistics failed. They can still be calculated manually.')
    try:
        arcpy.BuildPyramids_management(outfilename)
    except Exception:
        gprint('Pyramids failed. They can still be built manually.')
def run_combine(in_directory_species_grids, raster_list_combine, folder,
                out_folder, region):
    """Combine every species raster in a folder with the habitat/elevation rasters.

    For each species grid (except those whose species code is in the global
    ``skip_species`` set), runs Spatial Analyst Combine on the species raster
    plus ``raster_list_combine``, saves the result under an 8-character name
    (the ESRI grid limit), builds its attribute table and pyramids, and writes
    the lookup tables via ``output_lookup_tables``.  An error on one species
    is logged and its partial output deleted without stopping the others.

    :param in_directory_species_grids: root directory of species-grid folders
    :param raster_list_combine: rasters to combine with each species raster
        (never mutated by this function)
    :param folder: region folder name under the root directory
    :param out_folder: directory receiving the combined outputs
    :param region: region abbreviation (passed through to the lookup writer)
    """
    # Point the workspace at this region's species rasters and list them.
    arcpy.env.workspace = in_directory_species_grids + os.sep + folder
    sp_list = arcpy.ListRasters()

    for spe_raster in sp_list:
        # BUG FIX: the original did ``c_raster_list = raster_list_combine``,
        # an alias rather than the copy its comment claimed.  The inserted
        # species path was only removed in the except handler, so on every
        # successful (or skipped-because-existing) species the shared list
        # grew and polluted all later iterations.  Take a real copy.
        c_raster_list = list(raster_list_combine)

        if spe_raster.split("_")[1] in skip_species:
            # Bypass species in the user-defined skip list.
            continue

        print('Start Generating species overlap files')
        start_raster = datetime.datetime.now()  # elapsed clock per species
        in_spe = in_directory_species_grids + os.sep + folder + os.sep + spe_raster
        # Species raster goes first in the list handed to Combine.
        c_raster_list.insert(0, in_spe)
        out_name = os.path.join(out_folder, spe_raster[:8])
        try:
            # Skip outputs that already exist.
            if not arcpy.Exists(out_name):
                print('Start Combine for {0} with {1}'.format(spe_raster,
                                                              c_raster_list))
                print("Out location will be {0}".format(out_name))
                out_combine = Combine(c_raster_list)
                # 8-character name stays within the ESRI grid name limit.
                out_combine.save(out_name)
                print('Saved {0} \n Build raster attribute table'.format(out_name))
                arcpy.BuildRasterAttributeTable_management(out_name)
                print('Build pyramids')
                arcpy.BuildPyramids_management(out_name)
                # Text lookups mapping combine values back to habitat,
                # elevation and species inputs.
                output_lookup_tables(spe_raster, c_raster_list, out_folder,
                                     region)
                print('Completed {0} in {1}'.format(
                    out_name, datetime.datetime.now() - start_raster))
        except Exception as error:
            # Log the error and remove any partial output before moving on.
            print('Error was {0} elapse time was {1}'.format(
                error.args[0], datetime.datetime.now() - start_raster))
            if arcpy.Exists(out_name):
                arcpy.Delete_management(out_name)
def build_stats(raster):
    """Builds statistics and pyramids for output rasters.

    Each step is best-effort: a failure is reported via gprint and the
    remaining step still runs.
    """
    steps = (
        (lambda: arcpy.CalculateStatistics_management(raster, "1", "1", "#"),
         'Statistics failed. They can still be calculated manually.'),
        (lambda: arcpy.BuildPyramids_management(raster),
         'Pyramids failed. They can still be built manually.'),
    )
    for run_step, failure_message in steps:
        try:
            run_step()
        except Exception:
            gprint(failure_message)
    return
def fill_sinks(workspace, in_raster, out_raster):
    """Fill sinks in a surface raster and save the result with pyramids.

    Thin wrapper over the Spatial Analyst Fill tool (see the ArcMap 10.4
    Fill documentation).
    """
    env.workspace = workspace
    filled = Fill(in_raster)
    filled.save(out_raster)
    arcpy.BuildPyramids_management(out_raster)
def extract(workspace, in_raster, mask, out_raster):
    """Clip a raster to a mask and save the result with pyramids.

    Thin wrapper over the Spatial Analyst ExtractByMask tool (see the
    ArcMap 10.4 Extract by Mask documentation).
    """
    env.workspace = workspace
    clipped = ExtractByMask(in_raster, mask)
    clipped.save(out_raster)
    arcpy.BuildPyramids_management(out_raster)
def idw(inPointFeatures, zField, out_raster, in_Mask):
    """Interpolate point values with IDW, clip to a mask, build pyramids.

    Does nothing when ``out_raster`` already exists on disk.  The unmasked
    intermediate (suffixed "IDW") is removed once the clipped output is saved.
    """
    if os.path.exists(out_raster):
        return
    cell_size = 0.02
    idw_power = 2
    unmasked = os.path.join(os.path.dirname(out_raster),
                            os.path.basename(out_raster) + "IDW")
    arcpy.CheckOutExtension("GeoStats")
    arcpy.IDW_ga(inPointFeatures, zField, "", unmasked, cell_size, idw_power)
    masked = ExtractByMask(unmasked, in_Mask)
    masked.save(out_raster)
    arcpy.BuildPyramids_management(out_raster)
    arcpy.Delete_management(unmasked)
def Krig(inPointFeatures, zField, outRaster, in_Mask):
    """Krige point values (circular model), clip to a mask, build pyramids.

    Does nothing when ``outRaster`` already exists on disk.

    Fix: the intermediate kriging raster is now deleted only AFTER the masked
    result has been saved.  The original deleted it between ExtractByMask and
    save, while the unsaved result could still depend on it; this also makes
    the save-then-delete order consistent with idw() in this file.
    """
    if os.path.exists(outRaster):
        return
    kModel = "CIRCULAR"
    cellSize = 0.02
    total_raster = os.path.join(os.path.dirname(outRaster),
                                os.path.basename(outRaster) + "Kr")
    arcpy.CheckOutExtension("GeoStats")
    arcpy.Kriging_3d(inPointFeatures, zField, total_raster, kModel, cellSize)
    outExtractByMask = ExtractByMask(total_raster, in_Mask)
    outExtractByMask.save(outRaster)
    arcpy.BuildPyramids_management(outRaster)
    arcpy.Delete_management(total_raster)
def get_soc0_999(workspace, in_raster, value_table, out_raster):
    """Attach soc0_999 values to a mukey raster and save a Lookup raster.

    Joins the ``soc0_999`` column from ``value_table`` onto the raster's
    attribute table by ``mukey``, then builds a raster of those values.
    """
    env.workspace = workspace
    key_field = 'mukey'
    soc_field = 'soc0_999'
    # Bring the soc0_999 column into the raster VAT.
    arcpy.JoinField_management(in_raster, key_field, value_table,
                               key_field, soc_field)
    soc_raster = Lookup(in_raster, soc_field)
    arcpy.CalculateStatistics_management(soc_raster)
    soc_raster.save(out_raster)
    arcpy.BuildPyramids_management(out_raster)
def buildPyramids_new(inras, technique):
    """Build (or overwrite) JPEG-compressed pyramids for one raster dataset.

    Arguments to the tool: all pyramid levels (-1), never skip the first
    level, caller-chosen resampling technique, 100%-quality JPEG compression,
    and OVERWRITE so existing pyramids are rebuilt.
    """
    print('running buildPyramids() function....')
    arcpy.BuildPyramids_management(inras, "-1", "NONE", technique,
                                   "JPEG", "100", "OVERWRITE")
def execute(self, parameters, messages):
    """Create one depth-masked bathymetry raster per requested depth.

    Reads the tool parameters (bathymetry raster, depth string, output
    directory), then for each depth writes a "bath<depth>" raster where
    cells shallower than -depth are NoData and all others are 1.
    """
    arcpy.env.overwriteOutput = True
    for param in parameters:
        arcpy.AddMessage("Parameter: %s = %s" % (param.name, param.valueAsText))

    input_bathymetry = parameters[0].valueAsText
    depths = parameters[1].valueAsText
    output_directory = parameters[2].valueAsText
    if not os.path.exists(output_directory):
        os.makedirs(output_directory)

    arcpy.AddMessage("Extracting depths from " + str(input_bathymetry) + ".")
    depth_list = load_depth_string(depths)
    arcpy.AddMessage("Depths are: " + str(depth_list))

    # Processing environment mirrors the input bathymetry grid.
    env.mask = input_bathymetry
    env.cellSize = input_bathymetry
    env.extent = arcpy.Raster(input_bathymetry).extent
    env.workspace = output_directory

    # One extraction per requested depth.
    for item in depth_list:
        depth = int(float(item))
        # Cells with VALUE > -depth (i.e. shallower) become NoData.
        null_clause = "VALUE > " + "-" + str(depth)
        arcpy.AddMessage("Processing depth: " + str(depth))
        masked = SetNull(input_bathymetry, "1", null_clause)
        clipped = ApplyEnvironment(masked)
        out_path = os.path.join(output_directory, "bath" + str(depth))
        clipped.save(out_path)
        arcpy.env.pyramid = "PYRAMIDS 3 BILINEAR JPEG"
        arcpy.BuildPyramids_management(out_path)
    return
def flow_direction(workspace, in_raster, out_raster):
    """Compute flow direction (NORMAL mode) and save it with pyramids.

    Creates a raster of flow direction from each cell to its steepest
    downslope neighbor.  Requires the Spatial Analyst extension (see the
    ArcMap 10.4 Flow Direction documentation).
    """
    env.workspace = workspace
    directions = FlowDirection(in_raster, "NORMAL")
    directions.save(out_raster)
    arcpy.BuildPyramids_management(out_raster)
def main(out_directory, in_directory_species_grids, skip_reg):
    """Drive the per-region combine workflow over every species-grid folder.

    For each region folder (named ``<region>_...``) not in ``skip_reg``,
    assembles the habitat/elevation (and optionally on/off-field) rasters,
    builds their attribute tables and pyramids, and hands them plus the
    region's species grids to run_combine().
    """
    start_time = datetime.datetime.now()
    print("Start Time: " + start_time.ctime())
    arcpy.CheckOutExtension("Spatial")  # Spatial Analyst license

    if not os.path.exists(out_directory):
        os.mkdir(out_directory)

    # Folders of species files; each name starts with the region code.
    for folder in os.listdir(in_directory_species_grids):
        region = folder.split('_')[0]
        print("\nWorking on {0}".format(region))
        if region in skip_reg:
            # Region is in the user-defined skip set.
            continue
        out_folder = out_directory + os.sep + folder
        if not os.path.exists(out_folder):
            os.mkdir(out_folder)
        set_extent(region, snap_dict)  # processing extent for this region
        print('Generating lists of rasters to combine')
        if include_on_off:
            # NOTE: including on/off fields (e.g. CONUS-wide) makes the
            # number of unique combine values extremely high.
            hab_to_combine = get_hab_ele_rast_path(region,
                                                   raster_layer_libraries)
            rast_to_combine = get_on_off_field(hab_to_combine, region,
                                               raster_layer_libraries)
        else:
            rast_to_combine = get_hab_ele_rast_path(region,
                                                    raster_layer_libraries)
        # Attribute tables and pyramids for every raster entering the combine.
        for r in rast_to_combine:
            print('Building attribute tables and pyramids for {0}'.format(r))
            arcpy.BuildRasterAttributeTable_management(r)
            arcpy.BuildPyramids_management(r)
        run_combine(in_directory_species_grids, rast_to_combine, folder,
                    out_folder, region)

    end = datetime.datetime.now()
    print("End Time: " + end.ctime())
    print("Elapsed Time: " + str(end - start_time))
def LayerTif(tifPath):
    """Build pyramids for a TIF and save a companion relative-path .lyr file.

    Console messages are user-facing runtime strings and are kept byte-for-
    byte (Chinese); only comments are in English.  Fixes: idiomatic existence
    test (was ``== False``) and the Python-2-only ``except Exception, e``
    syntax replaced with ``except Exception as e`` (valid in 2.6+ and 3).
    """
    # Check whether the TIF file actually exists before doing anything.
    print("路径有效!正在检查文件夹是否存在TIF数据……")
    if not os.path.exists(tifPath):
        print("指定路径的tif数据不存在!")
        return
    # Build pyramids, then create the .lyr file next to the TIF.
    try:
        print("构建影像金字塔")
        arcpy.BuildPyramids_management(tifPath)
        name, ext = os.path.splitext(tifPath)
        RDlyr = name + ".lyr"
        arcpy.MakeRasterLayer_management(tifPath, name, "", "", "")
        arcpy.SaveToLayerFile_management(name, RDlyr, "RELATIVE")
    except Exception as e:
        print("error: ", e)
        return
def flow_accumulation(workspace, in_raster, out_raster):
    """Accumulate flow along a flow-direction raster and save with pyramids.

    Creates a raster of accumulated flow to each cell (integer output, no
    weight raster).  Requires the Spatial Analyst extension (see the ArcMap
    10.4 Flow Accumulation documentation).
    """
    env.workspace = workspace
    accumulated = FlowAccumulation(in_raster, '', 'INTEGER')
    accumulated.save(out_raster)
    arcpy.BuildPyramids_management(out_raster)
def slope(workspace, in_raster, out_raster):
    """Compute slope in radians from a DEM and save it with pyramids.

    Identifies the rate of maximum change in z-value from each cell: slope
    is derived in degrees, then converted to radians (pi/180).  Requires
    the Spatial Analyst extension.
    """
    env.workspace = workspace
    slope_degrees = Slope(in_raster, 'DEGREE')
    slope_radians = Times(slope_degrees, math.pi / 180)
    slope_radians.save(out_raster)
    arcpy.BuildPyramids_management(out_raster)
def multi_convert_to_raster(polygon_fc_list, output_workspace):
    """Rasterize each polygon feature class on its zoneid field.

    hu12, buffer, and watershed ('ws' but not 'nws') layers are converted at
    30 m; everything else uses the module-level CELL_SIZE.
    """
    for polygon_fc in polygon_fc_list:
        fine_grained = ('hu12' in polygon_fc or 'buff' in polygon_fc
                        or ('ws' in polygon_fc and 'nws' not in polygon_fc))
        cell_size = 30 if fine_grained else CELL_SIZE
        env.extent = polygon_fc
        env.snapRaster = SNAP_RASTER
        short_name = os.path.splitext(os.path.basename(polygon_fc))[0]
        arcpy.AddMessage("Converting {}...".format(short_name))
        out_path = os.path.join(output_workspace, short_name + '_raster')
        zoneid_field = arcpy.ListFields(polygon_fc, '*zoneid')[0].name
        result = arcpy.PolygonToRaster_conversion(polygon_fc, zoneid_field,
                                                  out_path, 'CELL_CENTER',
                                                  cellsize=cell_size)
        arcpy.BuildPyramids_management(result, SKIP_FIRST=True)
        arcpy.AddMessage("Completed.")
def do_calc(_):
    """Apply ``fct`` to one dataset, save any raster result, build pyramids.

    ``fct`` and ``buildPyramids`` come from the enclosing scope.  Returns the
    input path so callers can track completed items.

    Fixes: the bare ``except:`` around pyramid building is narrowed to
    ``except Exception``, and the dead, commented-out retry scaffolding plus
    the unused ``has_pyramids`` flag are removed.
    """
    r = fct(_)
    # Save only when fct returned a raster-like object, so functions that
    # don't return a raster (pure side-effect tools) are also usable.
    if r is not None and 'save' in dir(r):
        r.save(_)
        del r
    if buildPyramids:
        try:
            print("Building pyramids")
            arcpy.BuildPyramids_management(_)
        except Exception:
            # Pyramid failure is non-fatal; the raster itself is already saved.
            print("Building pyramids failed.")
    return _
def build_pyramids():
    """Build default pyramids for one hard-coded fusion TIF.

    The original defined a dozen locals (skip factors, compression, etc.)
    intended for a BuildPyramidsAndStatistics call that was commented out and
    never used; those dead locals are removed.  Behavior is unchanged: set
    the workspace, report progress, and build pyramids with tool defaults.
    """
    f = 'E:\\FVC内蒙古植被覆盖数据\\fusion\\fusion_int\\2018.tif'
    arcpy.env.workspace = "C:/Workspace"
    print('building ' + f)
    arcpy.BuildPyramids_management(f)
def topographic_wetness_index(workspace, flow_accumulation_raster,
                              slope_raster, out_raster):
    """Compute a min-max normalised topographic wetness index raster.

    TWI = ln(flow_accumulation * cell_area / tan(slope)) after Quinn et al.
    1991, "The prediction of hillslope flow paths for distributed
    hydrological modelling using digital terrain models", Hydrological
    Processes 5.1: 59-79, DOI: 10.1002/hyp.3360050106.  Cells are assumed to
    be 30 m, so cell_area = 30^2 = 900.  The raw TWI is then rescaled to
    [0, 1] via (twi - twi_min) / (twi_max - twi_min).

    Requires the Spatial Analyst extension.  Each map-algebra step writes an
    intermediate raster into the workspace; only the normalised result is
    saved to ``out_raster``.
    """
    env.workspace = workspace
    cell_area = 900  # 30 m x 30 m, see docstring

    tan_slope = Tan(slope_raster)
    scaled_accumulation = Times(flow_accumulation_raster, cell_area)
    twi = Ln(Divide(scaled_accumulation, tan_slope))

    # GetRasterProperties returns strings; convert before arithmetic.
    twi_min = float(
        arcpy.GetRasterProperties_management(twi, "MINIMUM").getOutput(0))
    twi_max = float(
        arcpy.GetRasterProperties_management(twi, "MAXIMUM").getOutput(0))
    twi_norm = Divide(Minus(twi, twi_min), twi_max - twi_min)

    twi_norm.save(out_raster)
    arcpy.BuildPyramids_management(out_raster)
def PrjExtRaster(outTemp, theSR, cSize, fcBuf, Year, ACPFlkup):
    """Project, majority-filter, mask and attribute a CDL raster for a buffer.

    Reprojects ``outTemp`` to the buffer's projection (unless already in
    Albers, factory code '43007'), smooths salt-and-pepper noise with
    majority filters, clips to the buffer, joins class/rotation attributes
    from the ACPF lookup, builds pyramids, and deletes intermediates.
    """
    # Reproject unless the source is already in Albers.
    if theSR != '43007':
        prjTemp = "%s//PRJTemp.tif" % env.scratchFolder
        arcpy.ProjectRaster_management(outTemp, prjTemp, fcBuf, "NEAREST",
                                       cSize)
    else:
        prjTemp = outTemp

    # Majority filtering reduces salt-and-pepper noise; cells coarser than
    # 30 m get two extra passes.
    env.snapRaster = prjTemp
    arcpy.AddMessage(" Majority")
    filtered = MajorityFilter(prjTemp, "EIGHT", "HALF")
    if cSize > 30:
        filtered = MajorityFilter(filtered, "EIGHT", "HALF")
        filtered = MajorityFilter(filtered, "FOUR", "HALF")
    RMajF = MajorityFilter(filtered, "FOUR", "HALF")

    # Clip to the buffer and save under the year-stamped name.
    extNASS = ExtractByMask(RMajF, fcBuf)
    extNASS.save("wsCDL%s" % Year)

    # Attach class names and rotation values from the ACPF lookup table.
    arcpy.JoinField_management(extNASS, "Value", ACPFlkup, "Value",
                               ["CLASS_NAME", "ROTVAL"])
    arcpy.BuildPyramids_management(extNASS)

    # Clean up intermediates (prjTemp may be the same dataset as outTemp).
    arcpy.Delete_management(outTemp)
    if arcpy.Exists(prjTemp):
        arcpy.Delete_management(prjTemp)
import arcpy
from arcpy import env
from datetime import datetime
import logging

# Batch-build pyramids for every raster in a folder, mirroring progress
# into a log file.
start_timestamp = datetime.now()

# Folder containing the rasters to process.
env.workspace = r"C:\temp"

logging.basicConfig(filename="BuildStatisticsScriptLog.txt",
                    level=logging.DEBUG)

RasterList = arcpy.ListRasters("*")
TotalRasterCount = len(RasterList)

for Counter, raster in enumerate(RasterList, start=1):
    # SKIP_EXISTING: rasters that already have pyramids are left alone.
    arcpy.BuildPyramids_management(raster, "-1", "NONE", "NEAREST",
                                   "DEFAULT", "75", "SKIP_EXISTING")
    progress = (raster + " (" + str(Counter) + " out of "
                + str(TotalRasterCount) + ")")
    print("Done with " + progress)
    logging.info("done with " + progress)

print("done in: " + str(datetime.now() - start_timestamp))
logging.info("done at " + str(datetime.now()))
try: maps_dir = glob.glob(maps_dir)[0] except: logging.info('No such folder: %s' % (maps_dir)) continue print('UTM zone: %s' % (UTMz_name)) env.workspace = maps_dir dat_files_list = arcpy.ListFiles("*.dat") logging.info('Nr map files for %s: %d' % (UTMz_name, len(dat_files_list))) ## Build pyramids for all .dat files in folder for layer in dat_files_list: print(layer) arcpy.BuildPyramids_management( os.path.join(env.workspace, layer), skip_existing="SKIP_EXISTING" ) ## do not create pyramids if they already exist print(toc(start_time)) ## Test with parallel approach (not exactly working!) #pool = multiprocessing.Pool(24, None, None, 1) # #pool.map(arcpy.BuildPyramids_management, arcpy.ListFiles()) # ## closing pool and waiting for each task to be finished #pool.close() #pool.join()
# Export every data-driven page of the map document to a JPEG, then stamp
# each JPEG with the NZGD2000 projection and build pyramids for it.
# NOTE(review): assumes `mxd` (an arcpy.mapping.MapDocument) was created
# earlier in this script -- confirm against the preceding code.
df = arcpy.mapping.ListDataFrames(mxd, "Layers")[0]
sr = arcpy.SpatialReference(2193)  # EPSG 2193 -- NZGD2000 / NZTM
print df.name
#todo export jpegs
print 'export JPEG'
for pageNum in range(1, mxd.dataDrivenPages.pageCount + 1):
    print 'export ' + str(pageNum) + '...'
    mxd.dataDrivenPages.currentPageID = pageNum
    #todo: HARDCODE BELOW r"D:\tmp\Output"
    # World file is written so the JPEG is georeferenced.
    arcpy.mapping.ExportToJPEG(mxd, r"D:\tmp\Output" + str(pageNum) + ".jpg",
                               df, df_export_width=1200,
                               df_export_height=1600, world_file=True,
                               resolution=300,
                               color_mode="24-BIT_TRUE_COLOR")
    arcpy.DefineProjection_management(r"D:\tmp\Output" + str(pageNum) + ".jpg",
                                      sr)
    arcpy.BuildPyramids_management(r"D:\tmp\Output" + str(pageNum) + ".jpg")
#todo remove Object
del mxd
#todo my Cool Message
print "Susan is Very cool"
def build_pyramid(in_raster):
    """Build default pyramids for a single raster dataset."""
    arcpy.BuildPyramids_management(in_raster)
def createpyramids(inraster):
    """Build default pyramids for one raster and report success.

    Kept as a one-call wrapper so pyramid options can later be customised
    in a single place.
    """
    arcpy.BuildPyramids_management(inraster)
    return True
import arcpy
import os
from arcpy import env
from arcpy.sa import *
import csv

# Calculate statistics and build pyramids for every .tif in the folder.
env.overwriteOutput = True
env.workspace = r'D:\1. Codes and Programs\1. river detection comparison methods\00_my_Matlab_codes_for_single_image\sentinel2_T22WEV'

for raster in arcpy.ListRasters("*", "tif"):
    print(raster)
    arcpy.CalculateStatistics_management(raster)
    arcpy.BuildPyramids_management(raster)
] # get list of on/off field layers for region # on_off_field = raster_layer_libraries + os.sep + region + '_' + 'OnOffField.gdb' # arcpy.env.workspace = on_off_field # raster_list_on_off = arcpy.ListRasters() # # add path to the raster name for combine because the workspace is changing # # append the habitat and elevation raster to to lst of raster to be combine with the path to the gdb # # because the workspace is changing # for v in raster_list_on_off : # raster_list_combine.append(on_off_field + os.sep + v) # build att table and builds for all rasters in combine for r in raster_list_combine: arcpy.BuildRasterAttributeTable_management(r) arcpy.BuildPyramids_management(r) # get list of species raster for region arcpy.env.workspace = in_directory_species_grids + os.sep + folder sp_list = arcpy.ListRasters() # combine each species file with the habitat and elevation rasters for spe_raster in sp_list: if spe_raster.split("_")[1] in skip_species: continue else: start_raster = datetime.datetime.now() if not arcpy.Exists(out_folder + os.sep + spe_raster): in_spe = in_directory_species_grids + os.sep + folder + os.sep + spe_raster # inset the path to the species file to list of raster to combine in index position 1 raster_list_combine.insert(0, in_spe)