def clip(raster, nhd_gdb, projection, outfolder):
    """Clip `raster` to buffered HUC8 boundaries taken from `nhd_gdb`.

    For every WBD_HU8 polygon in the current subregion (module-level
    `nhdsubregion` -- assumed set by the caller, TODO confirm), a separate
    feature class is exported into a new "HUC8_Albers" feature dataset,
    buffered by 5000 m, and used to clip `raster` into per-HUC8 TIFFs
    under `outfolder`.
    """
    env.workspace = nhd_gdb
    env.outputCoordinateSystem = projection
    env.compression = "NONE"  # only final tifs are generated
    env.pyramid = "NONE"

    # Create a feature dataset in NHD file geodatabase named "HUC8_Albers"
    # in Albers projection
    out_feature_dataset = "HUC8_Albers"
    arcpy.CreateFeatureDataset_management(env.workspace, out_feature_dataset,
                                          projection)
    # BUG FIX: the original called RefreshCatalog/Compact on `nhd`, a name
    # not defined in this function; the geodatabase parameter is `nhd_gdb`.
    arcpy.RefreshCatalog(nhd_gdb)

    # HUC8 polygons each saved as separate fc inheriting albers from environ
    huc8_fc = "WBD_HU8"
    field = "HUC_8"
    arcpy.MakeFeatureLayer_management(huc8_fc, "huc8_layer")
    with arcpy.da.SearchCursor(huc8_fc, field) as cursor:
        for row in cursor:
            if row[0].startswith(nhdsubregion):
                whereClause = ''' "%s" = '%s' ''' % (field, row[0])
                arcpy.SelectLayerByAttribute_management(
                    "huc8_layer", 'NEW_SELECTION', whereClause)
                arcpy.CopyFeatures_management(
                    "huc8_layer",
                    os.path.join(out_feature_dataset, "HUC" + row[0]))

    # retrieve only the single huc8 fcs and not the one with all of them
    fcs = arcpy.ListFeatureClasses("HUC%s*" % nhdsubregion, "Polygon",
                                   out_feature_dataset)
    fcs_buffered = [os.path.join(out_feature_dataset, fc + "_buffer")
                    for fc in fcs]
    out_clips = [os.path.join(outfolder, "huc8clips" + nhdsubregion,
                              "NED" + fc[3:] + ".tif") for fc in fcs]

    # Buffer HUC8 feature classes by 5000m
    for fc, fc_buffered in zip(fcs, fcs_buffered):
        arcpy.Buffer_analysis(fc, fc_buffered, "5000 meters")
    cu.multi_msg("Created HUC8 buffers.")
    arcpy.RefreshCatalog(nhd_gdb)

    # Clips rasters
    cu.multi_msg("Starting HUC8 clips...")
    for fc_buffered, out_clip in zip(fcs_buffered, out_clips):
        arcpy.Clip_management(raster, '', out_clip, fc_buffered, "0",
                              "ClippingGeometry")
    arcpy.Compact_management(nhd_gdb)
    cu.multi_msg("Clipping complete.")
def delete_dir(dir):
    """Best-effort removal of a directory known to arcpy.

    Tries shutil.rmtree first; if that fails (typically an ArcGIS lock on
    the data), falls back to arcpy.Delete_management. Failures are
    deliberately swallowed -- callers treat deletion as best-effort.
    """
    if arcpy.Exists(dir):
        try:
            arcpy.RefreshCatalog(dir)
            shutil.rmtree(dir)
        # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate.
        except Exception:
            # In case rmtree was unsuccessful due to lock on data
            try:
                arcpy.RefreshCatalog(dir)
                arcpy.Delete_management(dir)
            except Exception:
                pass
    return
def create_point_feature_class(pointFeatureClass):
    """Create (or recreate) an empty POINT feature class at the given path.

    Any existing feature class at `pointFeatureClass` is deleted first via
    the module's `delete_feature_class` helper. Creation errors are routed
    to `config.run_error_message` rather than raised.
    """
    if arcpy.Exists(pointFeatureClass):
        delete_feature_class(pointFeatureClass)
        arcpy.RefreshCatalog(os.path.dirname(pointFeatureClass))
    try:
        arcpy.CreateFeatureclass_management(
            os.path.dirname(pointFeatureClass),
            os.path.basename(pointFeatureClass), "POINT")
        arcpy.RefreshCatalog(os.path.dirname(pointFeatureClass))
    # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate.
    except Exception:
        config.run_error_message(pointFeatureClass,
                                 "Point feature class creation failure")
    gc.collect()
def main(argv=None):
    """Run field calculations for every dataset node in the module-level
    `datasets` XML collection against tables in `gzSupport.workspace`.

    Relies on module globals: `datasets`, `SUCCESS`, `gzSupport`, and the
    sibling `setFieldValues` function. Sets the SUCCESS script parameter
    and returns None (or -1 when a schema lock cannot be obtained).
    """
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    tables = gzSupport.listDatasets(gzSupport.workspace)
    tNames = tables[0]
    tFullNames = tables[1]
    name = ''
    for dataset in datasets:
        arcpy.env.Workspace = gzSupport.workspace
        # Resolve the dataset node's short name to a full table path.
        name = dataset.getAttributeNode("name").nodeValue
        table = gzSupport.getFullName(name, tNames, tFullNames)
        gzSupport.sourceIDField = dataset.getAttributeNode(
            "sourceIDField").nodeValue
        gzSupport.sourceNameField = dataset.getAttributeNode(
            "sourceNameField").nodeValue
        # Bail out entirely if the table is missing or locked.
        if not arcpy.Exists(table):
            gzSupport.addError("Feature Class " + table +
                               " does not exist, exiting")
            arcpy.SetParameter(SUCCESS, False)
            return
        if not arcpy.TestSchemaLock(table):
            gzSupport.addError("Unable to obtain a schema lock for " +
                               table + ", exiting")
            arcpy.SetParameter(SUCCESS, False)
            return -1
        desc = arcpy.Describe(table)
        fields = dataset.getElementsByTagName("Field")
        try:
            attrs = [f.name for f in arcpy.ListFields(table)]
            # Make sure every configured target field exists on the table.
            for field in fields:
                arcpy.env.Workspace = gzSupport.workspace
                targetName = gzSupport.getNodeValue(field, "TargetName")
                gzSupport.addGizintaField(table, targetName, field, attrs)
            retVal = setFieldValues(table, fields)
            if retVal == False:
                success = False
            gzSupport.logDatasetProcess(name, "Fields", retVal)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
            gzSupport.cleanupGarbage()
        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("fieldCalculator", name, False)
        finally:
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError(
            "Errors occurred during process, look in log file tools\\log\\fieldCalculator.log for more information"
        )
    # ignoreErrors downgrades failures to success by configuration.
    if gzSupport.ignoreErrors == True:
        success = True
    arcpy.SetParameter(SUCCESS, success)
    arcpy.ResetProgressor()
    gzSupport.closeLog()
    return
def main(argv=None):
    """Export each configured source layer into the gizinta geodatabase.

    Iterates the module-level `datasets` XML nodes, deleting any existing
    target feature class and re-exporting it via `exportDataset`.

    NOTE(review): `sourceLayer` is read but never assigned here -- it is
    presumably a module-level parameter; confirm against the full script.
    Relies on module globals: `datasets`, `SUCCESS`, `gzSupport`.
    """
    success = True
    name = ''
    try:
        # Create the workspace on first run, otherwise compact it.
        if not arcpy.Exists(gzSupport.workspace):
            gzSupport.addMessage(gzSupport.workspace +
                                 " does not exist, attempting to create")
            gzSupport.createGizintaGeodatabase()
        else:
            gzSupport.compressGDB(gzSupport.workspace)
        if len(datasets) > 0:
            progBar = len(datasets) + 1
            arcpy.SetProgressor("step", "Importing Layers...", 0, progBar, 1)
            arcpy.SetProgressorPosition()
        for dataset in datasets:
            gzSupport.sourceIDField = dataset.getAttributeNode(
                "sourceIDField").nodeValue
            sourceName = dataset.getAttributeNode("sourceName").nodeValue
            targetName = dataset.getAttributeNode("targetName").nodeValue
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " +
                                     targetName + "...")
            if not arcpy.Exists(sourceLayer):
                gzSupport.addError("Layer " + sourceLayer +
                                   " does not exist, exiting")
                return
            target = os.path.join(gzSupport.workspace, targetName)
            arcpy.env.Workspace = gzSupport.workspace
            # Remove any previous copy of the target before exporting.
            if not arcpy.Exists(target):
                gzSupport.addMessage("Feature Class " + target +
                                     " does not exist")
            else:
                arcpy.Delete_management(target)
            try:
                retVal = exportDataset(sourceLayer, targetName, dataset)
                if retVal == False:
                    success = False
            except:
                gzSupport.showTraceback()
                success = False
                retVal = False
            gzSupport.logDatasetProcess(sourceName, targetName, retVal)
            arcpy.SetProgressorPosition()
    except:
        gzSupport.addError("A Fatal Error occurred")
        gzSupport.showTraceback()
        success = False
        gzSupport.logDatasetProcess("extractLayerToGDB", name, False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
        if success == False:
            gzSupport.addError(
                "Errors occurred during process, look in log files for more information"
            )
        # ignoreErrors downgrades failures to success by configuration.
        if gzSupport.ignoreErrors == True:
            success = True
        gzSupport.closeLog()
        arcpy.SetParameter(SUCCESS, success)
def recalculate_mosaic_statistics(mosaic_dataset):
    """ Recalculates ArcGIS Mosaic statistics and pyramids."""
    san.arc_print("Recalculating Mosaic Statistics.")
    mgmt = arcpy.management
    mgmt.CalculateStatistics(mosaic_dataset)
    mgmt.BuildPyramidsandStatistics(
        mosaic_dataset,
        'INCLUDE_SUBDIRECTORIES',
        'BUILD_PYRAMIDS',
        'CALCULATE_STATISTICS',
    )
    # Refresh so the recalculated statistics show up in catalog views.
    arcpy.RefreshCatalog(mosaic_dataset)
def tidy_delete_old_files(self, tidy_list):
    """ Tries to delete each item in tidy_list. """
    for entry in tidy_list:
        # Refresh before each attempt so arcpy sees the current folder state.
        arcpy.RefreshCatalog(MAIN_PATH)
        entry_path = os.path.join(MAIN_PATH, entry)
        if not arcpy.Exists(entry_path):
            continue
        try:
            arcpy.Delete_management(entry_path)
            message("{} deleted".format(entry))
        except Exception as e:
            # Report but keep going -- deletion here is best-effort.
            message("~ Unable to delete {}. {}".format(entry, str(e)))
def calculate_linker(featureClass):
    """Populate the LINKER field of `featureClass` with each row's OID.

    Raises ExistsError if the feature class does not exist; other failures
    are routed to `config.run_error_message` rather than raised.
    """
    if not arcpy.Exists(featureClass):
        raise ExistsError
    try:
        arcpy.RefreshCatalog(os.path.dirname(featureClass))
        describeFeatureClass = arcpy.Describe(featureClass)
        # Bracketed field name, e.g. "[OBJECTID]", as a calculation expression.
        oidField = "[" + describeFeatureClass.OIDFieldName + "]"
        arcpy.CalculateField_management(featureClass, "LINKER", oidField)
    # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate.
    except Exception:
        config.run_error_message(featureClass, "LINKER calculation failure")
    gc.collect()
def TestSchrijfbaarheidDoelen():
    #===============================
    # Write-permission test for the output locations.
    # Note: checking os.stat(pathname)[ST_MODE] was judged too cumbersome and
    # unclear, so a small write test is done instead: save a tiny test raster.
    # e.g. testRas = ap.sa.CreateConstantRaster(1, "INTEGER", 1, ap.sa.Extent(50,50,52,52))
    # sometimes caused problems (even crashes) depending on the extent, so one
    # cell extracted from the soil raster is used instead. Sharing a single
    # test raster between both tests crashed when pH, trophic level and
    # vegetation were all requested, so each test creates and saves its own.
    BodKrt = KrtPadDct[KAART_BODEM]  # soil map: always present in every run
    # Sample point just inside the lower-left corner of the soil raster.
    X = float(ap.management.GetRasterProperties(BodKrt, "LEFT").getOutput(0)) + 0.1
    Y = float(
        ap.management.GetRasterProperties(BodKrt, "BOTTOM").getOutput(0)) + 0.1
    if Vegetatie_berekenen:
        # Write test for the vegetation results workspace.
        testRas = ap.sa.ExtractByPoints(
            BodKrt, [ap.Point(X, Y)])  # use a single grid cell
        testRasKrt = VegResRWS + SEP + "schrTEST"
        try:
            testRas.save(testRasKrt)
            ap.RefreshCatalog(VegResRWS)
            ap.Delete_management(testRasKrt)
            ap.RefreshCatalog(VegResRWS)
        except:
            raise FOUT("Probleem bij schrijftest naar doelpad '" +VegResRWS+ "' "+ \
                       "(voor resultaten vegetatiebereking) -- SCHRIJFRECHTEN OK ?-\n"+ \
                       str(sys.exc_type)+ ": "+str(sys.exc_value))
        del testRas
    if (pH_berekenen or Trofie_berekenen):
        # Write test for the pH/trophic-level results workspace.
        testRas2 = ap.sa.ExtractByPoints(
            BodKrt, [ap.Point(X, Y)])  # use a single grid cell
        testRasKrt2 = pH_Trofie_RWS + SEP + "schrTEST2"
        try:
            testRas2.save(testRasKrt2)
            ap.RefreshCatalog(pH_Trofie_RWS)
            ap.Delete_management(testRasKrt2)
            ap.RefreshCatalog(pH_Trofie_RWS)
        except:
            raise FOUT("Probleem bij schrijftest naar doelpad '" +pH_Trofie_RWS+ "' "+ \
                       "(voor pH/Trofie berekening) -- SCHRIJFRECHTEN OK ?\n"+ \
                       str(sys.exc_type)+ ": "+str(sys.exc_value))
        del testRas2
def main(argv = None):
    """Run configured QA checks for every dataset node in the module-level
    `datasets` XML collection against tables in `gzSupport.workspace`.

    Relies on module globals: `datasets`, `SUCCESS`, `sourceFieldQA`,
    `targetFieldQA`, `gzSupport`, and the sibling `runDatasetChecks` /
    `runFieldCheck` functions. Sets the SUCCESS script parameter.
    """
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    if len(datasets) > 0:
        progBar = len(datasets)
        arcpy.SetProgressor("step", "Running QA...", 0,progBar, 1)
    for dataset in datasets:
        arcpy.env.Workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
        table = os.path.join(gzSupport.workspace,name)
        fields = dataset.getElementsByTagName("Field")
        try:
            # run qa for dataset
            qaRulesDataset = dataset.getAttributeNode("qa").nodeValue
            gzSupport.addMessage("\nRunning QA (" + qaRulesDataset + ") for " + name)
            retVal = runDatasetChecks(dataset,table,qaRulesDataset)
            if retVal == False:
                success = False
            for field in fields:
                sourceQA = False
                targetQA = False
                # NOTE(review): if neither QA flag is "true" (or the rules do
                # not include CheckFields), `fieldName` below is referenced
                # without ever being assigned -- potential NameError.
                if sourceFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    sourceQA = True
                    fieldName = gzSupport.getNodeValue(field,"SourceName")
                if targetFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    targetQA = True
                    fieldName = gzSupport.getNodeValue(field,"TargetName")
                retVal = runFieldCheck(dataset,table,field,sourceQA,targetQA)
                if retVal == False:
                    success = False
                gzSupport.logDatasetProcess(name,fieldName,retVal)
            arcpy.SetProgressorPosition()
        except:
            gzSupport.showTraceback()
            gzSupport.addError("Field Check Error")
            success = False
            gzSupport.logDatasetProcess(name,"",False)
        finally:
            arcpy.ResetProgressor()
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")
    # ignoreErrors downgrades failures to success by configuration.
    if gzSupport.ignoreErrors == True:
        success = True
    arcpy.SetParameter(SUCCESS, success)
    gzSupport.closeLog()
    return
def update_mosaic_statistics(mosaic_dataset):
    """Enable time on a mosaic dataset and rebuild its statistics/pyramids."""
    logging.debug('updating mosaic statistics')
    # Turn on time awareness driven by the start_date/end_date fields.
    arcpy.SetMosaicDatasetProperties_management(
        mosaic_dataset,
        use_time="ENABLED",
        start_time_field="start_date",
        end_time_field="end_date",
    )
    mgmt = arcpy.management
    mgmt.CalculateStatistics(mosaic_dataset)
    mgmt.BuildPyramidsandStatistics(
        mosaic_dataset,
        'INCLUDE_SUBDIRECTORIES',
        'BUILD_PYRAMIDS',
        'CALCULATE_STATISTICS',
    )
    # Refresh so the updated dataset is visible in catalog views.
    arcpy.RefreshCatalog(mosaic_dataset)
def MaakTempDir(DoelDir):
    #========================
    # Create a fresh NICHE temporary working directory next to (or inside)
    # the target location and return its path. An existing temp dir is
    # removed first so every run starts clean.
    data_type = ap.Describe(DoelDir).DataType
    if data_type == "Folder":
        parent_dir = DoelDir
    elif data_type == "Workspace":
        # For a workspace (e.g. a geodatabase) use its containing folder.
        parent_dir = os.path.dirname(DoelDir)
    else:
        raise FOUT("Doelpad '" + DoelDir + "' heeft een onverwacht datatype !?!")
    werkdir = parent_dir + SEP + "___tmpdir_NICHE___"
    try:
        if ap.Exists(werkdir):
            rmtree(werkdir)
        os.mkdir(werkdir)
        ap.RefreshCatalog(parent_dir)
    except:
        raise FOUT("Probleem bij aanmaken tijdelijke werkdir (onder '"+parent_dir+"') !\n"+ \
                   str(sys.exc_type)+ ": "+str(sys.exc_value))
    return werkdir
def DebrisAreaSegmentation(debarea, fishnetRes, lookDistance, workspace):
    """Segment a debris-area polygon into roughly fishnet-cell-sized tiles.

    Builds a fishnet over `debarea` at resolution `fishnetRes`, then
    repeatedly merges neighbouring tiles (searching up to
    fishnetRes * lookDistance for neighbours) until each merged group
    covers at most 100% of one fishnet cell's area, writing each finished
    group to workspace + "DebrisCutForCliffs<N>.shp".

    NOTE(review): Python 2 code (print statements); `workspace` is assumed
    to end with a path separator since paths are built by concatenation.
    """
    import os,arcpy
    from arcpy import env
    desc = arcpy.Describe(debarea)
    spatialRef = arcpy.Describe(debarea).spatialReference
    # Fishnet covering the debris area extent (shifted 10 up at the top).
    arcpy.CreateFishnet_management("Cliff_"+str(fishnetRes)+"fishnet.shp",str(desc.extent.lowerLeft),str(desc.extent.XMin) + " " + str(desc.extent.YMax + 10),fishnetRes,fishnetRes,"0","0",str(desc.extent.upperRight),"NO_LABELS","#","POLYGON")
    # create 'value' to dissolve further down
    arcpy.AddField_management("Cliff_"+str(fishnetRes)+"fishnet.shp", "value", "SHORT", 1, "", "", "", "", "")
    arcpy.MakeFeatureLayer_management("Cliff_"+str(fishnetRes)+"fishnet.shp", "tempLayer")
    # Keep only fishnet cells that actually intersect the debris area.
    arcpy.SelectLayerByLocation_management("tempLayer", 'WITHIN_A_DISTANCE', debarea, str(-1) + " meters")
    arcpy.SelectLayerByAttribute_management("tempLayer", "SWITCH_SELECTION")
    arcpy.DeleteFeatures_management("tempLayer")
    # FIDc: stable copy of FID used as the tile identifier throughout.
    arcpy.AddField_management("Cliff_"+str(fishnetRes)+"fishnet.shp",'FIDc','SHORT')
    arcpy.CalculateField_management("Cliff_"+str(fishnetRes)+"fishnet.shp", "FIDc", "!FID!", "PYTHON_9.3")
    arcpy.DefineProjection_management("Cliff_"+str(fishnetRes)+"fishnet.shp", spatialRef)
    arcpy.Intersect_analysis(["Cliff_"+str(fishnetRes)+"fishnet.shp",debarea], "tiles.shp", "ALL", "", "")
    # Perc_gl: percentage of a fishnet cell covered by debris in each tile.
    arcpy.AddField_management('tiles.shp','Perc_gl','FLOAT')
    rows = arcpy.UpdateCursor("tiles.shp")
    for row in rows:
        row.Perc_gl = (row.shape.area/fishnetRes**2)*100
        rows.updateRow(row)
    del row, rows
    arcpy.JoinField_management("Cliff_"+str(fishnetRes)+"fishnet.shp", "FIDc", "tiles.shp", "FIDc", ["Perc_gl"])
    counter = 0
    # Main loop: consume fishnet tiles until none remain.
    while True:
        if arcpy.management.GetCount("Cliff_"+str(fishnetRes)+"fishnet.shp")[0] == "0":
            break
        else:
            # Sorted list of remaining tile ids; n[0] is the seed tile.
            n = []
            rows = arcpy.SearchCursor("Cliff_"+str(fishnetRes)+"fishnet.shp")
            for row in rows:
                n.append(row.getValue("FIDc"))
            del row, rows
            n.sort()
            arcpy.SelectLayerByAttribute_management("tempLayer", "CLEAR_SELECTION")
            noSelection = []
            noSelection = int(str(arcpy.GetCount_management("tempLayer")))
            # Find neighbours of the seed tile that share a boundary.
            arcpy.SelectLayerByAttribute_management("tempLayer", "NEW_SELECTION", "FIDc="+ str(n[0]))
            arcpy.SelectLayerByLocation_management("tempLayer", "SHARE_A_LINE_SEGMENT_WITH","tempLayer", "", "NEW_SELECTION")
            arcpy.SelectLayerByAttribute_management("tempLayer", "REMOVE_FROM_SELECTION", "FIDc="+ str(n[0]))
            result = []
            result = arcpy.GetCount_management("tempLayer")
            if int(result.getOutput(0)) == noSelection:
                # condition where no tiles share a line segment: widen the
                # search to a distance of fishnetRes * lookDistance.
                arcpy.SelectLayerByAttribute_management("tempLayer", "NEW_SELECTION", "FIDc="+ str(n[0]))
                arcpy.SelectLayerByLocation_management("tempLayer","WITHIN_A_DISTANCE","tempLayer", str(fishnetRes*lookDistance) + " meters", "NEW_SELECTION")
                arcpy.SelectLayerByAttribute_management("tempLayer", "REMOVE_FROM_SELECTION", "FIDc="+ str(n[0]))
                # if still no shapes after look distance
                result = arcpy.GetCount_management("tempLayer")
                if int(result.getOutput(0)) == 0:
                    arcpy.CreateFeatureclass_management(workspace, "Cliff_"+str(fishnetRes)+"fishnet_iteration.shp", "POLYGON","tempLayer")
                else:
                    arcpy.CopyFeatures_management("tempLayer", "Cliff_"+str(fishnetRes)+"fishnet_iteration.shp")
            else:
                arcpy.CopyFeatures_management("tempLayer", "Cliff_"+str(fishnetRes)+"fishnet_iteration.shp")
            # populate listFIDc: unique ID of 'share a boundary' shapes in "Cliff_"+str(fishnetRes)+"fishnet_iteration.shp"
            # NOTE(review): `flag` is only set inside the loop; if the cursor
            # yields no rows the following `if not flag:` reads a stale or
            # undefined name.
            listFIDc = []
            tiles = arcpy.SearchCursor("Cliff_"+str(fishnetRes)+"fishnet_iteration.shp")
            for tile in tiles:
                flag = True
                b = tile.getValue("FIDc")
                listFIDc.append(b)
            if not flag:
                listFIDc = []
            # iterate through features in "Cliff_"+str(fishnetRes)+"fishnet_iteration.shp" and find one (if exists) with a summed area below fishnetRes^2
            tileNumber = len(listFIDc)
            tileCount = 0
            summation = 101
            breakTracker = []
            while summation > 100:
                print str(tileCount)+" of "+str(tileNumber)+" (tileCount of tileNumber)"
                arcpy.SelectLayerByAttribute_management("tempLayer", "CLEAR_SELECTION")
                if tileCount == tileNumber:
                    # No neighbour keeps the group under 100%: cut the seed
                    # tile out on its own.
                    if os.path.exists(workspace+"DebrisCutForCliffs"+str(counter)+".shp"):
                        arcpy.Delete_management(workspace+"DebrisCutForCliffs"+str(counter)+".shp")
                        arcpy.RefreshCatalog(workspace)
                        pathFinal = workspace+"DebrisCutForCliffs"+str(counter)+".shp"
                    else:
                        pathFinal = workspace+"DebrisCutForCliffs"+str(counter)+".shp"
                    # extract deb area
                    arcpy.SelectLayerByAttribute_management("tempLayer", "NEW_SELECTION", "FIDc="+ str(n[0]))
                    arcpy.Intersect_analysis(["tempLayer", debarea], pathFinal)
                    arcpy.DeleteFeatures_management("tempLayer")
                    arcpy.Delete_management("Cliff_"+str(fishnetRes)+"fishnet_iteration.shp")
                    counter = counter+1
                    print "Counter updated: "+str(counter)
                    breakTracker = 1
                    break
                else:
                    # Trial-merge seed tile with the next candidate neighbour.
                    arcpy.SelectLayerByAttribute_management("tempLayer", "NEW_SELECTION", "FIDc="+ str(n[0]))
                    arcpy.SelectLayerByAttribute_management("tempLayer", "ADD_TO_SELECTION", "FIDc="+ str(listFIDc[tileCount]))
                    areaList = []
                    rows = arcpy.SearchCursor("tempLayer")
                    for row in rows:
                        s = row.getValue("Perc_gl")
                        areaList.append(s)
                    del row, rows
                    print "areaList:"
                    print(areaList)
                    summation = sum(areaList)
                    print "summation: "+str(summation)
                    #if summation <= 100:
                    #    break
                    #else:
                    tileCount = tileCount+1
                    print "tileCount "+str(tileCount-1) +" updated to "+str(tileCount)
                    continue
            if breakTracker == 1:
                # The seed tile was emitted on its own; restart the main loop.
                breakTracker = []
                continue
            else:
                # A merge partner was found: dissolve the pair and fold the
                # merged shape back into the working fishnet.
                if not os.path.exists(workspace+"DebrisCutForCliffs0.shp"):
                    pathDissolve = workspace+"DebrisDissolveForCliffs0.shp"
                    pathFinal = workspace+"DebrisCutForCliffs0.shp"
                else:
                    fcListFinal = arcpy.ListFeatureClasses("*DebrisCutForCliffs*")
                    fcListFinal.sort()
                    s = fcListFinal[::-1][0]
                    if counter - int(s.split("Cliffs",1)[1].split(".shp")[0]) == 0:
                        arcpy.Delete_management(workspace+"DebrisCutForCliffs"+str(counter)+".shp")
                        arcpy.Delete_management(workspace+"DebrisDissolveForCliffs"+str(counter)+".shp")
                        arcpy.RefreshCatalog(workspace)
                        pathDissolve = workspace+"DebrisDissolveForCliffs"+str(counter)+".shp"
                        pathFinal = workspace+"DebrisCutForCliffs"+str(counter)+".shp"
                    else:
                        pathDissolve = workspace+"DebrisDissolveForCliffs"+str(counter)+".shp"
                        pathFinal = workspace+"DebrisCutForCliffs"+str(counter)+".shp"
                # merge two tiles
                arcpy.Dissolve_management("tempLayer", pathDissolve,"value")
                # extract deb area
                arcpy.Intersect_analysis([pathDissolve, debarea], pathFinal)
                # update Perc_gl
                fields = ['Perc_gl']
                fieldList = arcpy.ListFields(pathDissolve)
                fieldName = [f.name for f in fieldList]
                for field in fields:
                    if field in fieldName:
                        print "Field 'Perc_gl' already exists, not replaced"
                    else:
                        arcpy.AddField_management(pathDissolve, field, 'FLOAT')
                del field, fields
                del f, fieldList
                del fieldName
                # update FIDc
                rows = arcpy.UpdateCursor(pathDissolve)
                for row in rows:
                    row.Perc_gl = summation
                    rows.updateRow(row)
                del row, rows
                fields = ['FIDc']
                fieldList = arcpy.ListFields(pathDissolve)
                fieldName = [f.name for f in fieldList]
                for field in fields:
                    if field in fieldName:
                        print "Field 'FIDc' already exists, not replaced"
                    else:
                        arcpy.AddField_management(pathDissolve, field,'SHORT')
                del field, fields
                del f, fieldList
                del fieldName
                features = arcpy.UpdateCursor(pathDissolve)
                for feature in features:
                    feature.FIDc = counter
                    features.updateRow(feature)
                del feature,features
                # Replace the two source tiles with the merged shape.
                arcpy.MakeFeatureLayer_management(pathDissolve, "tempLayer1")
                arcpy.SelectLayerByAttribute_management("tempLayer", "CLEAR_SELECTION")
                arcpy.Update_analysis("tempLayer","tempLayer1", "update.shp")
                arcpy.Delete_management("Cliff_"+str(fishnetRes)+"fishnet.shp")
                arcpy.RefreshCatalog(workspace)
                arcpy.Rename_management("update.shp","Cliff_"+str(fishnetRes)+"fishnet.shp")
                arcpy.RefreshCatalog(workspace)
                arcpy.MakeFeatureLayer_management("Cliff_"+str(fishnetRes)+"fishnet.shp", "tempLayer")
                #Delete last feature to exit while loop
                if arcpy.management.GetCount("Cliff_"+str(fishnetRes)+"fishnet.shp")[0] == "1":
                    arcpy.MakeFeatureLayer_management("Cliff_"+str(fishnetRes)+"fishnet.shp", "tempLayer2")
                    arcpy.SelectLayerByLocation_management("tempLayer2", 'WITHIN_A_DISTANCE', workspace+"\\DebrisCutForCliffs"+str(counter)+".shp",str(-1) + " meters")
                    arcpy.DeleteFeatures_management("tempLayer2")
                    arcpy.Delete_management(pathDissolve)
                    arcpy.Delete_management("tempLayer1")
                    arcpy.Delete_management("Cliff_"+str(fishnetRes)+"fishnet_iteration.shp")
                    print "tile "+str(counter)+" assigned"
                    continue
                else:
                    arcpy.Delete_management(pathDissolve)
                    arcpy.Delete_management("tempLayer1")
                    arcpy.Delete_management("Cliff_"+str(fishnetRes)+"fishnet_iteration.shp")
                    print "tile "+str(counter)+" assigned"
                    continue
    # Final cleanup of working layers and intermediate shapefiles.
    arcpy.Delete_management("tempLayer")
    arcpy.Delete_management("tiles.shp")
    arcpy.Delete_management("Cliff_"+str(fishnetRes)+"fishnet.shp")
#!! rasters ReclResGxG moeten al 0/1 waarden hebben VOOR de vermenigvuldiging hier ! RclsSomGHGras = ReclassifySomGxG(somGHG, RngGHG, VegKode, "somGHG") RclsSomGLGras = ReclassifySomGxG(somGLG, RngGLG, VegKode, "somGLG") uitTms = RclsSomGHGras * RclsSomGLGras uitTms.save(VegKansResult) ap.Delete_management(rcl_f_nm) del somGLG, somGHG, RclsSomGHGras, RclsSomGLGras, uitTms # Opruimen # -------- ap.AddMessage("\n") try: if ap.Exists(tmpdir): rmtree(tmpdir) ap.RefreshCatalog(os.path.dirname(tmpdir)) except: ap.AddWarning("Mogelijk probleempje bij opruimen tijdelijke TEMPdir '"+tmpdir+"'; " \ "deze mag gewist worden.\n" +str(sys.exc_value)) except FOUT as f: ap.AddError("\nFOUT-> " + f.message + "\n") if ap.Exists(tmpdir): rmtree(tmpdir) ap.AddWarning("Mogelijk blijft een tijdelijke TEMPdir '"+tmpdir+ \ "' achter; deze mag gewist worden.\n") except: ap.AddError("\nFOUT in blok " + prgblok + "\n" + str(sys.exc_type) + ": " + str(sys.exc_value)) if ap.Exists(tmpdir):
# Filename = DropAndCalc.py # Purpose: Drops unrequired fields except NHD_ID and then adds and updates geometry fields import arcpy, os, time fc = arcpy.GetParameterAsText(0) def dropfields(fc): try: fields = arcpy.ListFields(fc) dropfields = [] for field in fields: if not field.required: dropfields.append(field.name) if field.name in "NHD_ID": dropfields.remove(field.name) print "Dropping fields %s " % (dropfields) arcpy.DeleteField_management(fc, dropfields) print "Updating Geometry fields" arcpy.AddField_management(fc, "Area_ha","DOUBLE") arcpy.CalculateField_management(fc, "Area_ha", "!shape.area@hectares!", "PYTHON") arcpy.AddField_management(fc, "Perim_km", "DOUBLE") arcpy.CalculateField_management(fc, "Perim_km", "!shape.length@kilometers!", "PYTHON") del fields del dropfields except: arcpy.AddMessage("Something went wrong. Maybe you already ran this one?") pass dropfields(fc) arcpy.RefreshCatalog(fc) time.sleep(5)
# Tail of the processing script: scale each raster, clip to the analysis
# extent, clean up non-TIFF byproducts and report timing. Relies on names
# defined earlier in the script (inFolder, scaleFactor, extent, startTime).
outFolder = str(arcpy.CreateFolder_management(inFolder, "processing_results"))

rasterList = arcpy.ListRasters()
for raster in rasterList:
    # BUG FIX: the original used rstrip(os.path.splitext(raster)[1]), but
    # rstrip strips a *character set*, not a suffix -- names ending in
    # 't', 'i' or 'f' (e.g. "foot.tif" -> "foo") were truncated. splitext
    # removes exactly the extension.
    outName = os.path.splitext(os.path.basename(raster))[0]
    print(u"\u2022" + " Processing raster: " + str(outName) + "...")
    arcpy.gp.Times_sa(raster, scaleFactor, "in_memory/ndvi")
    arcpy.gp.ExtractByMask_sa("in_memory/ndvi", extent,
                              outFolder + "\\" + outName + "_p.tif")

print(u"\u2022" + " Cleaning workspace...")
arcpy.env.workspace = outFolder
itemList = arcpy.ListFiles()
for item in itemList:
    # Keep only the final .tif outputs in the results folder.
    if str(os.path.splitext(item)[1].lstrip(".")) != str("tif"):
        arcpy.gp.Delete_management(item)

endTime = datetime.datetime.now()
print(u"\u2022" + " End time: " + str(endTime))
elapsedTime = endTime - startTime
print(u"\u2022" + " Elapsed time: " + str(elapsedTime))
countFiles = len(os.listdir(outFolder))
print(u"\u2022" + " " + str(countFiles) + " raster files processed")
print(u"\u2022" + " Folder containing output files: " + str(outFolder))
print(u"\u2022" + " Processing complete!")
arcpy.RefreshCatalog(inFolder)
arcpy.RefreshCatalog(outFolder)
arcpy.gp.Delete_management("in_memory")
arcpy.ClearWorkspaceCache_management()
# indexLayer = mxd.dataDrivenPages.indexLayer # fields = arcpy.ListFields(indexLayer) # field_names = [f.name for f in fields] # lyrList = arcpy.mapping.ListLayers(df) # for lyr in lyrList: # if lyr.name == indexLayer.name: # dsrc = lyr.dataSource # pnfield = mxd.dataDrivenPages.pageNameField # pname = pnfield.name # Main arcpy.CreateFeatureclass_management(wkspace, fcname, "POLYGON", "", "DISABLED", "DISABLED", spr) arcpy.RefreshCatalog(fcname) arcpy.AddField_management(outfc, "DDP_PAGE_NO", "TEXT", "", "", 50) with arcpy.da.InsertCursor(fcname, field_names) as cur: for pageNum in range(1, mxd.dataDrivenPages.pageCount + 1): mxd.dataDrivenPages.currentPageID = pageNum pageNumText = str(pageNum) ex = df.extent test = (ex.XMin, ex.YMin, ex.XMax, ex.YMax) if test not in compare: v1 = arcpy.Point( ex.XMin, ex.YMin) # Point coords clockwise from bottom left of frame v2 = arcpy.Point(ex.XMin, ex.YMax) v3 = arcpy.Point(ex.XMax, ex.YMax)
fourhajunction_lyr = os.path.join(outfolder, "fourhajunction.lyr") # Create folder for final output if not os.path.exists(os.path.join(outfolder, "IWS")): os.mkdir(os.path.join(outfolder, "IWS")) iws = os.path.join(outfolder, "IWS") arcpy.MakeFeatureLayer_management(tenhajunction, os.path.join(outfolder, "tenhajunction.lyr")) tenhajunction_lyr = os.path.join(outfolder, "tenhajunction.lyr") arcpy.AddMessage("Starting iteration.") for fc in fcs: arcpy.RefreshCatalog(outfolder) name = os.path.splitext(fc)[0] arcpy.AddMessage("Processing " + name + ".") # Sets the output to in memory: lakes = "in_memory" # Repair the lake geometery if needed. arcpy.RepairGeometry_management(fc) # Make sure the lake's own watershed gets added (merged) back in to the final aggregated watershed: # Make a centroid for the lake, then intersect it with watersheds, then merge it with the previous sheds made above. arcpy.FeatureToPoint_management(fc, os.path.join(lakes, "center" + name), "INSIDE") center = os.path.join(lakes, "center" + name) arcpy.SelectLayerByLocation_management(watersheds_lyr, "INTERSECT", center, '', "NEW_SELECTION") arcpy.CopyFeatures_management(watersheds_lyr, os.path.join(lakes, "ownshed" + name))
def STEP6_calc_barriers():
    """Detects influential barriers given CWD calculations from s3_calcCwds.py.

    For each search radius from cfg.STARTRADIUS to cfg.ENDRADIUS (step
    cfg.RADIUSSTEP) this maps, per core-pair link, the restoration benefit of
    removing a barrier ("barrier center" rasters), optionally summing across
    core pairs (cfg.SUM_BARRIERS), writing percent-benefit rasters
    (cfg.WRITE_PCT_RASTERS) and trimmed rasters (cfg.WRITE_TRIM_RASTERS).
    Results are mosaicked across links and radii into cfg.BARRIERGDB.

    Relies on module-level names not visible in this block: cfg, lu, npy
    (numpy), path (os.path), gprint, retry, randomerror, setCoresToNull,
    tif (presumably the '.tif' extension string — TODO confirm), and the
    arcpy.sa names Raster/IsNull imported elsewhere.

    Raises: exits via lu.exit_with_geoproc_error / lu.exit_with_python_error
    on any failure (both handlers terminate the run).
    """
    try:
        arcpy.CheckOutExtension("spatial")
        lu.dashline(0)
        gprint('Running script ' + _SCRIPT_NAME)
        if cfg.BARRIER_CWD_THRESH is not None:
            lu.dashline(1)
            gprint('Invoking CWD Threshold of ' + str(cfg.BARRIER_CWD_THRESH) +
                   ' map units.')
        if cfg.SUM_BARRIERS:
            # Summed mode writes to suffixed dir/gdb so MAX and SUM runs coexist.
            sumSuffix = '_Sum'
            cfg.BARRIERBASEDIR = cfg.BARRIERBASEDIR + sumSuffix
            baseName, extension = path.splitext(cfg.BARRIERGDB)
            cfg.BARRIERGDB = baseName + sumSuffix + extension
            gprint('\nBarrier scores will be SUMMED across core pairs.')
        else:
            sumSuffix = ''

        # Delete contents of final ouptut geodatabase
        # lu.clean_out_workspace(cfg.BARRIERGDB) #xxx try not doing this to allow multiple radii to be analyzed in separate runs
        if not arcpy.Exists(cfg.BARRIERGDB):
            # Create output geodatabase
            arcpy.CreateFileGDB_management(cfg.OUTPUTDIR,
                                           path.basename(cfg.BARRIERGDB))

        startRadius = int(cfg.STARTRADIUS)
        endRadius = int(cfg.ENDRADIUS)
        radiusStep = int(cfg.RADIUSSTEP)
        if radiusStep == 0:
            endRadius = startRadius  # Calculate at just one radius value
            radiusStep = 1

        linkTableFile = lu.get_prev_step_link_table(step=6)
        arcpy.env.workspace = cfg.SCRATCHDIR
        arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
        arcpy.RefreshCatalog(cfg.PROJECTDIR)
        PREFIX = path.basename(cfg.PROJECTDIR)

        # For speed:
        arcpy.env.pyramid = "NONE"
        arcpy.env.rasterStatistics = "NONE"

        # set the analysis extent and cell size to that of the resistance
        # surface
        # NOTE(review): arcpy.OverWriteOutput is not a documented attribute —
        # likely meant arcpy.env.overwriteOutput; confirm before changing.
        arcpy.OverWriteOutput = True
        arcpy.env.extent = cfg.RESRAST
        arcpy.env.cellSize = cfg.RESRAST
        arcpy.env.snapRaster = cfg.RESRAST
        spatialref = arcpy.Describe(cfg.RESRAST).spatialReference
        mapUnits = (str(spatialref.linearUnitName)).lower()
        if len(mapUnits) > 1 and mapUnits[-1] != 's':
            mapUnits = mapUnits + 's'  # crude pluralization for messages

        if float(arcpy.env.cellSize) > startRadius or startRadius > endRadius:
            msg = ('Error: minimum detection radius must be greater than '
                   'cell size (' + str(arcpy.env.cellSize) +
                   ') \nand less than or equal to maximum detection radius.')
            lu.raise_error(msg)

        linkTable = lu.load_link_table(linkTableFile)
        numLinks = linkTable.shape[0]
        numCorridorLinks = lu.report_links(linkTable)
        if numCorridorLinks == 0:
            lu.dashline(1)
            msg = ('\nThere are no linkages. Bailing.')
            lu.raise_error(msg)

        # set up directories for barrier and barrier mosaic grids
        dirCount = 0
        gprint("Creating intermediate output folder: " + cfg.BARRIERBASEDIR)
        lu.delete_dir(cfg.BARRIERBASEDIR)
        lu.create_dir(cfg.BARRIERBASEDIR)
        arcpy.CreateFolder_management(cfg.BARRIERBASEDIR, cfg.BARRIERDIR_NM)
        cbarrierdir = path.join(cfg.BARRIERBASEDIR, cfg.BARRIERDIR_NM)

        coresToProcess = npy.unique(
            linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1])
        maxCoreNum = max(coresToProcess)

        # Set up focal directories.
        # To keep there from being > 100 grids in any one directory,
        # outputs are written to:
        # barrier\focalX_ for cores 1-99 at radius X
        # barrier\focalX_1 for cores 100-199
        # etc.
        lu.dashline(0)
        for radius in range(startRadius, endRadius + 1, radiusStep):
            core1path = lu.get_focal_path(1, radius)
            path1, dir1 = path.split(core1path)
            path2, dir2 = path.split(path1)
            arcpy.CreateFolder_management(path.dirname(path2),
                                          path.basename(path2))
            arcpy.CreateFolder_management(path.dirname(path1),
                                          path.basename(path1))

            if maxCoreNum > 99:
                gprint('Creating subdirectories for ' + str(radius) + ' ' +
                       str(mapUnits) + ' radius analysis scale.')
                maxDirCount = int(maxCoreNum / 100)
                focalDirBaseName = dir2
                # one subdir per block of 100 core ids
                cp100 = (coresToProcess.astype('int32')) / 100
                ind = npy.where(cp100 > 0)
                dirNums = npy.unique(cp100[ind])
                for dirNum in dirNums:
                    focalDir = focalDirBaseName + str(dirNum)
                    gprint('...' + focalDir)
                    arcpy.CreateFolder_management(path2, focalDir)

        # Create resistance raster with filled-in Nodata values for later use
        arcpy.env.extent = cfg.RESRAST
        resistFillRaster = path.join(cfg.SCRATCHDIR, "resist_fill")
        output = arcpy.sa.Con(IsNull(cfg.RESRAST), 1000000000,
                              Raster(cfg.RESRAST) - 1)
        output.save(resistFillRaster)

        coreList = linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1]
        coreList = npy.sort(coreList)

        # Loop through each search radius to calculate barriers in each link
        import time
        radId = 0  # keep track of number of radii processed- used for temp dir naming
        for radius in range(startRadius, endRadius + 1, radiusStep):
            radId = radId + 1
            linkTableTemp = linkTable.copy()

            @retry(10)  # can't pass vars in and modify them.
            def doRadiusLoop():
                # Closure over radius/linkTableTemp; retried whole on failure.
                linkTable = linkTableTemp.copy()
                # NOTE(review): time.clock() was removed in Python 3.8 —
                # fine for the Python 2 / ArcGIS Desktop runtime this targets.
                startTime = time.clock()
                randomerror()
                linkLoop = 0
                pctDone = 0
                gprint('\nMapping barriers at a radius of ' + str(radius) +
                       ' ' + str(mapUnits))
                if cfg.SUM_BARRIERS:
                    gprint('using SUM method')
                else:
                    gprint('using MAXIMUM method')
                if numCorridorLinks > 1:
                    gprint('0 percent done')
                lastMosaicRaster = None
                lastMosaicRasterPct = None
                for x in range(0, numLinks):
                    pctDone = lu.report_pct_done(linkLoop, numCorridorLinks,
                                                 pctDone)
                    linkId = str(int(linkTable[x, cfg.LTB_LINKID]))
                    # Active corridor links have type in (0, 1000) exclusive;
                    # >1000 marks links temporarily disabled below.
                    if ((linkTable[x, cfg.LTB_LINKTYPE] > 0) and
                            (linkTable[x, cfg.LTB_LINKTYPE] < 1000)):
                        linkLoop = linkLoop + 1
                        # source and target cores
                        corex = int(coreList[x, 0])
                        corey = int(coreList[x, 1])

                        # Get cwd rasters for source and target cores
                        cwdRaster1 = lu.get_cwd_path(corex)
                        cwdRaster2 = lu.get_cwd_path(corey)

                        # Mask out areas above CWD threshold
                        cwdTemp1 = None
                        cwdTemp2 = None
                        if cfg.BARRIER_CWD_THRESH is not None:
                            if x == 1:
                                lu.dashline(1)
                                gprint(' Using CWD threshold of ' +
                                       str(cfg.BARRIER_CWD_THRESH) +
                                       ' map units.')
                            arcpy.env.extent = cfg.RESRAST
                            arcpy.env.cellSize = cfg.RESRAST
                            arcpy.env.snapRaster = cfg.RESRAST
                            cwdTemp1 = path.join(cfg.SCRATCHDIR,
                                                 "tmp" + str(corex))
                            outCon = arcpy.sa.Con(
                                cwdRaster1 < float(cfg.BARRIER_CWD_THRESH),
                                cwdRaster1)
                            outCon.save(cwdTemp1)
                            cwdRaster1 = cwdTemp1
                            cwdTemp2 = path.join(cfg.SCRATCHDIR,
                                                 "tmp" + str(corey))
                            outCon = arcpy.sa.Con(
                                cwdRaster2 < float(cfg.BARRIER_CWD_THRESH),
                                cwdRaster2)
                            outCon.save(cwdTemp2)
                            cwdRaster2 = cwdTemp2

                        focalRaster1 = lu.get_focal_path(corex, radius)
                        focalRaster2 = lu.get_focal_path(corey, radius)
                        link = lu.get_links_from_core_pairs(linkTable,
                                                            corex, corey)
                        lcDist = float(linkTable[link, cfg.LTB_CWDIST])

                        # Detect barriers at radius using neighborhood stats
                        # Create the Neighborhood Object
                        innerRadius = radius - 1
                        outerRadius = radius
                        dia = 2 * radius
                        InNeighborhood = ("ANNULUS " + str(innerRadius) + " " +
                                          str(outerRadius) + " MAP")

                        @retry(10)
                        def execFocal():
                            randomerror()
                            # Execute FocalStatistics
                            if not path.exists(focalRaster1):
                                arcpy.env.extent = cwdRaster1
                                outFocalStats = arcpy.sa.FocalStatistics(
                                    cwdRaster1, InNeighborhood, "MINIMUM",
                                    "DATA")
                                if setCoresToNull:
                                    # Set areas overlapping cores to NoData xxx
                                    outFocalStats2 = arcpy.sa.Con(
                                        outFocalStats > 0, outFocalStats)
                                    outFocalStats2.save(focalRaster1)  # xxx
                                else:
                                    outFocalStats.save(focalRaster1)  # xxx
                                arcpy.env.extent = cfg.RESRAST
                            if not path.exists(focalRaster2):
                                arcpy.env.extent = cwdRaster2
                                outFocalStats = arcpy.sa.FocalStatistics(
                                    cwdRaster2, InNeighborhood, "MINIMUM",
                                    "DATA")
                                if setCoresToNull:
                                    # Set areas overlapping cores to NoData xxx
                                    outFocalStats2 = arcpy.sa.Con(
                                        outFocalStats > 0, outFocalStats)
                                    outFocalStats2.save(focalRaster2)  # xxx
                                else:
                                    outFocalStats.save(focalRaster2)  # xxx
                                arcpy.env.extent = cfg.RESRAST
                        execFocal()

                        lu.delete_data(cwdTemp1)
                        lu.delete_data(cwdTemp2)

                        barrierRaster = path.join(
                            cbarrierdir, "b" + str(radius) + "_" +
                            str(corex) + "_" + str(corey) + '.tif')

                        if cfg.SUM_BARRIERS:
                            # Need to set nulls to 0, also
                            # create trim rasters as we go
                            outRas = ((lcDist - Raster(focalRaster1) -
                                       Raster(focalRaster2) - dia) / dia)
                            outCon = arcpy.sa.Con(IsNull(outRas), 0, outRas)
                            outCon2 = arcpy.sa.Con(outCon < 0, 0, outCon)
                            outCon2.save(barrierRaster)

                            # Execute FocalStatistics to fill out search radii
                            InNeighborhood = "CIRCLE " + str(
                                outerRadius) + " MAP"
                            fillRaster = path.join(
                                cbarrierdir, "b" + str(radius) + "_" +
                                str(corex) + "_" + str(corey) + "_fill.tif")
                            outFocalStats = arcpy.sa.FocalStatistics(
                                barrierRaster, InNeighborhood, "MAXIMUM",
                                "DATA")
                            outFocalStats.save(fillRaster)

                            if cfg.WRITE_TRIM_RASTERS:
                                trmRaster = path.join(
                                    cbarrierdir, "b" + str(radius) + "_" +
                                    str(corex) + "_" + str(corey) +
                                    "_trim.tif")
                                rasterList = [fillRaster, resistFillRaster]
                                outCellStatistics = arcpy.sa.CellStatistics(
                                    rasterList, "MINIMUM")
                                outCellStatistics.save(trmRaster)
                        else:
                            # Calculate potential benefit per map unit restored
                            @retry(10)
                            def calcBen():
                                randomerror()
                                outRas = ((lcDist - Raster(focalRaster1) -
                                           Raster(focalRaster2) - dia) / dia)
                                outRas.save(barrierRaster)
                            calcBen()

                        if cfg.WRITE_PCT_RASTERS:
                            # Calculate PERCENT potential benefit per unit
                            # restored
                            barrierRasterPct = path.join(
                                cbarrierdir, "b" + str(radius) + "_" +
                                str(corex) + "_" + str(corey) + '_pct.tif')

                            @retry(10)
                            def calcBenPct():
                                randomerror()
                                outras = (100 *
                                          (Raster(barrierRaster) / lcDist))
                                outras.save(barrierRasterPct)
                            calcBenPct()

                        # Mosaic barrier results across core area pairs
                        mosaicDir = path.join(
                            cfg.SCRATCHDIR,
                            'mos' + str(radId) + '_' + str(x + 1))
                        lu.create_dir(mosaicDir)
                        mosFN = 'mos_temp'
                        tempMosaicRaster = path.join(mosaicDir, mosFN)
                        tempMosaicRasterTrim = path.join(mosaicDir,
                                                         'mos_temp_trm')
                        arcpy.env.workspace = mosaicDir
                        if linkLoop == 1:
                            # If this is the first grid then copy rather than
                            # mosaic
                            arcpy.CopyRaster_management(barrierRaster,
                                                        tempMosaicRaster)
                            if cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS:
                                arcpy.CopyRaster_management(
                                    trmRaster, tempMosaicRasterTrim)
                        else:
                            if cfg.SUM_BARRIERS:
                                outCon = arcpy.sa.Con(
                                    Raster(barrierRaster) < 0,
                                    lastMosaicRaster,
                                    Raster(barrierRaster) +
                                    Raster(lastMosaicRaster))
                                outCon.save(tempMosaicRaster)
                                if cfg.WRITE_TRIM_RASTERS:
                                    outCon = arcpy.sa.Con(
                                        Raster(trmRaster) < 0,
                                        lastMosaicRasterTrim,
                                        Raster(trmRaster) +
                                        Raster(lastMosaicRasterTrim))
                                    outCon.save(tempMosaicRasterTrim)
                            else:
                                rasterString = ('"' + barrierRaster + ";" +
                                                lastMosaicRaster + '"')

                                @retry(10)
                                def mosaicToNew():
                                    randomerror()
                                    arcpy.MosaicToNewRaster_management(
                                        rasterString, mosaicDir, mosFN, "",
                                        "32_BIT_FLOAT", arcpy.env.cellSize,
                                        "1", "MAXIMUM", "MATCH")
                                mosaicToNew()
                                # gprint(str(corex)+'0'+str(corey))

                        if linkLoop > 1:
                            # Clean up from previous loop
                            lu.delete_data(lastMosaicRaster)
                            lastMosaicDir = path.dirname(lastMosaicRaster)
                            lu.clean_out_workspace(lastMosaicDir)
                            lu.delete_dir(lastMosaicDir)

                        lastMosaicRaster = tempMosaicRaster
                        if cfg.WRITE_TRIM_RASTERS:
                            lastMosaicRasterTrim = tempMosaicRasterTrim

                        if cfg.WRITE_PCT_RASTERS:
                            mosPctFN = 'mos_temp_pct'
                            mosaicDirPct = path.join(
                                cfg.SCRATCHDIR,
                                'mosP' + str(radId) + '_' + str(x + 1))
                            lu.create_dir(mosaicDirPct)
                            tempMosaicRasterPct = path.join(mosaicDirPct,
                                                            mosPctFN)
                            if linkLoop == 1:
                                # If this is the first grid then copy
                                # rather than mosaic
                                if cfg.SUM_BARRIERS:
                                    outCon = arcpy.sa.Con(
                                        Raster(barrierRasterPct) < 0, 0,
                                        arcpy.sa.Con(
                                            IsNull(barrierRasterPct), 0,
                                            barrierRasterPct))
                                    outCon.save(tempMosaicRasterPct)
                                else:
                                    arcpy.CopyRaster_management(
                                        barrierRasterPct,
                                        tempMosaicRasterPct)
                            else:
                                if cfg.SUM_BARRIERS:
                                    @retry(10)
                                    def sumBarriers():
                                        randomerror()
                                        outCon = arcpy.sa.Con(
                                            Raster(barrierRasterPct) < 0,
                                            lastMosaicRasterPct,
                                            Raster(barrierRasterPct) +
                                            Raster(lastMosaicRasterPct))
                                        outCon.save(tempMosaicRasterPct)
                                    sumBarriers()
                                else:
                                    rasterString = ('"' + barrierRasterPct +
                                                    ";" +
                                                    lastMosaicRasterPct + '"')

                                    @retry(10)
                                    def maxBarriers():
                                        randomerror()
                                        arcpy.MosaicToNewRaster_management(
                                            rasterString, mosaicDirPct,
                                            mosPctFN, "", "32_BIT_FLOAT",
                                            arcpy.env.cellSize, "1",
                                            "MAXIMUM", "MATCH")
                                    maxBarriers()

                            if linkLoop > 1:
                                # Clean up from previous loop
                                lu.delete_data(lastMosaicRasterPct)
                                lastMosaicDirPct = path.dirname(
                                    lastMosaicRasterPct)
                                lu.clean_out_workspace(lastMosaicDirPct)
                                lu.delete_dir(lastMosaicDirPct)
                            # lu.delete_data(lastMosaicRasterPct)
                            lastMosaicRasterPct = tempMosaicRasterPct

                        if not cfg.SAVEBARRIERRASTERS:
                            lu.delete_data(barrierRaster)
                            if cfg.WRITE_PCT_RASTERS:
                                lu.delete_data(barrierRasterPct)
                            if cfg.WRITE_TRIM_RASTERS:
                                lu.delete_data(trmRaster)

                        # Temporarily disable links in linktable -
                        # don't want to mosaic them twice
                        for y in range(x + 1, numLinks):
                            corex1 = int(coreList[y, 0])
                            corey1 = int(coreList[y, 1])
                            if corex1 == corex and corey1 == corey:
                                linkTable[y, cfg.LTB_LINKTYPE] = (
                                    linkTable[y, cfg.LTB_LINKTYPE] + 1000)
                            elif corex1 == corey and corey1 == corex:
                                linkTable[y, cfg.LTB_LINKTYPE] = (
                                    linkTable[y, cfg.LTB_LINKTYPE] + 1000)

                if numCorridorLinks > 1 and pctDone < 100:
                    gprint('100 percent done')
                gprint('Summarizing barrier data for search radius.')
                # rows that were temporarily disabled
                rows = npy.where(linkTable[:, cfg.LTB_LINKTYPE] > 1000)
                linkTable[rows, cfg.LTB_LINKTYPE] = (
                    linkTable[rows, cfg.LTB_LINKTYPE] - 1000)

                # -------------------------------------------------------------
                # Set negative values to null or zero and write geodatabase.
                mosaicFN = (PREFIX + "_BarrierCenters" + sumSuffix + "_Rad" +
                            str(radius))
                mosaicRaster = path.join(cfg.BARRIERGDB, mosaicFN)
                arcpy.env.extent = cfg.RESRAST
                # if setCoresToNull:
                #     outCon = arcpy.sa.Con(Raster(tempMosaicRaster) < 0, 0,
                #                           tempMosaicRaster) #xxx
                #     outCon.save(mosaicRaster) #xxx
                # else:
                outSetNull = arcpy.sa.SetNull(tempMosaicRaster,
                                              tempMosaicRaster,
                                              "VALUE < 0")  # xxx orig
                outSetNull.save(mosaicRaster)
                lu.delete_data(tempMosaicRaster)

                if cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS:
                    mosaicFN = (PREFIX + "_BarrierCircles_RBMin" + sumSuffix +
                                "_Rad" + str(radius))
                    mosaicRasterTrim = path.join(cfg.BARRIERGDB, mosaicFN)
                    arcpy.CopyRaster_management(tempMosaicRasterTrim,
                                                mosaicRasterTrim)
                    lu.delete_data(tempMosaicRaster)

                if cfg.WRITE_PCT_RASTERS:
                    # Do same for percent raster
                    mosaicPctFN = (PREFIX + "_BarrierCenters_Pct" + sumSuffix +
                                   "_Rad" + str(radius))
                    arcpy.env.extent = cfg.RESRAST
                    outSetNull = arcpy.sa.SetNull(tempMosaicRasterPct,
                                                  tempMosaicRasterPct,
                                                  "VALUE < 0")
                    mosaicRasterPct = path.join(cfg.BARRIERGDB, mosaicPctFN)
                    outSetNull.save(mosaicRasterPct)
                    lu.delete_data(tempMosaicRasterPct)

                # 'Grow out' maximum restoration gain to
                # neighborhood size for display
                InNeighborhood = "CIRCLE " + str(outerRadius) + " MAP"
                # Execute FocalStatistics
                fillRasterFN = "barriers_fill" + str(outerRadius) + tif
                fillRaster = path.join(cfg.BARRIERBASEDIR, fillRasterFN)
                outFocalStats = arcpy.sa.FocalStatistics(mosaicRaster,
                                                         InNeighborhood,
                                                         "MAXIMUM", "DATA")
                outFocalStats.save(fillRaster)

                if cfg.WRITE_PCT_RASTERS:
                    # Do same for percent raster
                    fillRasterPctFN = "barriers_fill_pct" + str(
                        outerRadius) + tif
                    fillRasterPct = path.join(cfg.BARRIERBASEDIR,
                                              fillRasterPctFN)
                    outFocalStats = arcpy.sa.FocalStatistics(mosaicRasterPct,
                                                             InNeighborhood,
                                                             "MAXIMUM",
                                                             "DATA")
                    outFocalStats.save(fillRasterPct)

                # Place copies of filled rasters in output geodatabase
                arcpy.env.workspace = cfg.BARRIERGDB
                # NOTE(review): "Barrrier" (triple r) is in the original
                # output names; downstream steps may depend on it — keep.
                fillRasterFN = (PREFIX + "_BarrrierCircles" + sumSuffix +
                                "_Rad" + str(outerRadius))
                arcpy.CopyRaster_management(fillRaster, fillRasterFN)
                if cfg.WRITE_PCT_RASTERS:
                    fillRasterPctFN = (PREFIX + "_BarrrierCircles_Pct" +
                                       sumSuffix + "_Rad" + str(outerRadius))
                    arcpy.CopyRaster_management(fillRasterPct,
                                                fillRasterPctFN)

                if not cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS:
                    # Create pared-down version of filled raster- remove pixels
                    # that don't need restoring by allowing a pixel to only
                    # contribute its resistance value to restoration gain
                    outRasterFN = "barriers_trm" + str(outerRadius) + tif
                    outRaster = path.join(cfg.BARRIERBASEDIR, outRasterFN)
                    rasterList = [fillRaster, resistFillRaster]
                    outCellStatistics = arcpy.sa.CellStatistics(rasterList,
                                                                "MINIMUM")
                    outCellStatistics.save(outRaster)

                    # SECOND ROUND TO CLIP BY DATA VALUES IN BARRIER RASTER
                    outRaster2FN = ("barriers_trm" + sumSuffix +
                                    str(outerRadius) + "_2" + tif)
                    outRaster2 = path.join(cfg.BARRIERBASEDIR, outRaster2FN)
                    output = arcpy.sa.Con(IsNull(fillRaster), fillRaster,
                                          outRaster)
                    output.save(outRaster2)
                    outRasterFN = (PREFIX + "_BarrierCircles_RBMin" +
                                   sumSuffix + "_Rad" + str(outerRadius))
                    # NOTE(review): outRasterPath is computed but the copy
                    # below uses outRasterFN (relative to current workspace)
                    # — confirm intent before cleanup.
                    outRasterPath = path.join(cfg.BARRIERGDB, outRasterFN)
                    arcpy.CopyRaster_management(outRaster2, outRasterFN)
                randomerror()
                startTime = lu.elapsed_time(startTime)

            # Call the above function
            doRadiusLoop()

        # Combine rasters across radii
        gprint('\nCreating summary rasters...')
        if startRadius != endRadius:
            radiiSuffix = ('_Rad' + str(int(startRadius)) + 'To' +
                           str(int(endRadius)) + 'Step' + str(int(radiusStep)))
            mosaicFN = "bar_radii"
            mosaicPctFN = "bar_radii_pct"
            arcpy.env.workspace = cfg.BARRIERBASEDIR
            for radius in range(startRadius, endRadius + 1, radiusStep):
                # Fixme: run speed test with gdb mosaicking above and here
                radiusFN = (PREFIX + "_BarrierCenters" + sumSuffix + "_Rad" +
                            str(radius))
                radiusRaster = path.join(cfg.BARRIERGDB, radiusFN)
                if radius == startRadius:
                    # If this is the first grid then copy rather than mosaic
                    arcpy.CopyRaster_management(radiusRaster, mosaicFN)
                else:
                    mosaicRaster = path.join(cfg.BARRIERBASEDIR, mosaicFN)
                    arcpy.Mosaic_management(radiusRaster, mosaicRaster,
                                            "MAXIMUM", "MATCH")

                if cfg.WRITE_PCT_RASTERS:
                    radiusPctFN = (PREFIX + "_BarrierCenters_Pct" + sumSuffix +
                                   "_Rad" + str(radius))
                    radiusRasterPct = path.join(cfg.BARRIERGDB, radiusPctFN)
                    if radius == startRadius:
                        # If this is the first grid then copy rather than
                        # mosaic
                        arcpy.CopyRaster_management(radiusRasterPct,
                                                    mosaicPctFN)
                    else:
                        mosaicRasterPct = path.join(cfg.BARRIERBASEDIR,
                                                    mosaicPctFN)
                        arcpy.Mosaic_management(radiusRasterPct,
                                                mosaicRasterPct,
                                                "MAXIMUM", "MATCH")

            # Copy results to output geodatabase
            arcpy.env.workspace = cfg.BARRIERGDB
            mosaicFN = PREFIX + "_BarrierCenters" + sumSuffix + radiiSuffix
            arcpy.CopyRaster_management(mosaicRaster, mosaicFN)
            if cfg.WRITE_PCT_RASTERS:
                mosaicPctFN = (PREFIX + "_BarrierCenters_Pct" + sumSuffix +
                               radiiSuffix)
                arcpy.CopyRaster_management(mosaicRasterPct, mosaicPctFN)

            # GROWN OUT rasters
            fillMosaicFN = "barriers_radii_fill" + tif
            fillMosaicPctFN = "barriers_radii_fill_pct" + tif
            fillMosaicRaster = path.join(cfg.BARRIERBASEDIR, fillMosaicFN)
            fillMosaicRasterPct = path.join(cfg.BARRIERBASEDIR,
                                            fillMosaicPctFN)
            arcpy.env.workspace = cfg.BARRIERBASEDIR
            for radius in range(startRadius, endRadius + 1, radiusStep):
                radiusFN = "barriers_fill" + str(radius) + tif
                # fixme- do this when only a single radius too
                radiusRaster = path.join(cfg.BARRIERBASEDIR, radiusFN)
                if radius == startRadius:
                    # If this is the first grid then copy rather than mosaic
                    arcpy.CopyRaster_management(radiusRaster, fillMosaicFN)
                else:
                    arcpy.Mosaic_management(radiusRaster, fillMosaicRaster,
                                            "MAXIMUM", "MATCH")
                if cfg.WRITE_PCT_RASTERS:
                    radiusPctFN = "barriers_fill_pct" + str(radius) + tif
                    # fixme- do this when only a single radius too
                    radiusRasterPct = path.join(cfg.BARRIERBASEDIR,
                                                radiusPctFN)
                    if radius == startRadius:
                        # If this is the first grid then copy rather than
                        # mosaic
                        arcpy.CopyRaster_management(radiusRasterPct,
                                                    fillMosaicPctFN)
                    else:
                        arcpy.Mosaic_management(radiusRasterPct,
                                                fillMosaicRasterPct,
                                                "MAXIMUM", "MATCH")

            # Copy result to output geodatabase
            arcpy.env.workspace = cfg.BARRIERGDB
            fillMosaicFN = PREFIX + "_BarrierCircles" + sumSuffix + radiiSuffix
            arcpy.CopyRaster_management(fillMosaicRaster, fillMosaicFN)
            if cfg.WRITE_PCT_RASTERS:
                fillMosaicPctFN = (PREFIX + "_BarrierCircles_Pct" + sumSuffix +
                                   radiiSuffix)
                arcpy.CopyRaster_management(fillMosaicRasterPct,
                                            fillMosaicPctFN)

            # if not cfg.SUM_BARRIERS:
            # GROWN OUT AND TRIMMED rasters (Can't do percent)
            if cfg.WRITE_TRIM_RASTERS:
                trimMosaicFN = "bar_radii_trm"
                arcpy.env.workspace = cfg.BARRIERBASEDIR
                trimMosaicRaster = path.join(cfg.BARRIERBASEDIR, trimMosaicFN)
                for radius in range(startRadius, endRadius + 1, radiusStep):
                    radiusFN = (PREFIX + "_BarrierCircles_RBMin" + sumSuffix +
                                "_Rad" + str(radius))
                    # fixme- do this when only a single radius too
                    radiusRaster = path.join(cfg.BARRIERGDB, radiusFN)
                    if radius == startRadius:
                        # If this is the first grid then copy rather than
                        # mosaic
                        arcpy.CopyRaster_management(radiusRaster, trimMosaicFN)
                    else:
                        arcpy.Mosaic_management(radiusRaster, trimMosaicRaster,
                                                "MAXIMUM", "MATCH")
                # Copy result to output geodatabase
                arcpy.env.workspace = cfg.BARRIERGDB
                trimMosaicFN = (PREFIX + "_BarrierCircles_RBMin" + sumSuffix +
                                radiiSuffix)
                arcpy.CopyRaster_management(trimMosaicRaster, trimMosaicFN)

        if not cfg.SAVE_RADIUS_RASTERS:
            arcpy.env.workspace = cfg.BARRIERGDB
            rasters = arcpy.ListRasters()
            for raster in rasters:
                if 'rad' in raster.lower() and not 'step' in raster.lower():
                    lu.delete_data(raster)

        arcpy.env.workspace = cfg.BARRIERGDB
        rasters = arcpy.ListRasters()
        for raster in rasters:
            gprint('\nBuilding output statistics and pyramids\n'
                   'for raster ' + raster)
            lu.build_stats(raster)

        # Clean up temporary files and directories
        if not cfg.SAVEBARRIERRASTERS:
            lu.delete_dir(cbarrierdir)
            lu.delete_dir(cfg.BARRIERBASEDIR)
        if not cfg.SAVEFOCALRASTERS:
            for radius in range(startRadius, endRadius + 1, radiusStep):
                core1path = lu.get_focal_path(1, radius)
                path1, dir1 = path.split(core1path)
                path2, dir2 = path.split(path1)
                lu.delete_dir(path2)

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 6. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)
    # Return any PYTHON or system specific errors
    except:
        lu.dashline(1)
        gprint('****Failed in step 6. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)
    return
def execute(request):
    """Clips selected search results using the clip geometry.

    :param request: json as a dict.  Expected keys (from visible usage):
        'params' (task parameter list), 'folder' (working folder),
        'owner' (used to obtain a security token).

    Fetches the clip feature by id from the search index, resolves it to a
    local dataset / layer file / geometry, then clips each batch of indexed
    results via the module-level ``clip_data`` helper and packages the output
    (MPK/LPK/KML/zip).  Progress and final state are reported through the
    module-level ``status_writer``.  Relies on module globals not visible
    here: task_utils, status_writer, status, verify_ssl, clip_data,
    files_to_package, errors_reasons, skipped_reasons, result_count.
    """
    clipped = 0
    errors = 0
    skipped = 0
    global result_count
    parameters = request['params']

    # Retrieve the clip features.
    clip_features = task_utils.get_parameter_value(parameters, 'clip_features', 'value')
    # Retrieve the coordinate system code.
    out_coordinate_system = int(task_utils.get_parameter_value(parameters, 'output_projection', 'code'))
    # Retrieve the output format, create mxd and output file name parameter values.
    out_format = task_utils.get_parameter_value(parameters, 'output_format', 'value')
    create_mxd = task_utils.get_parameter_value(parameters, 'create_mxd', 'value')
    output_file_name = task_utils.get_parameter_value(parameters, 'output_file_name', 'value')
    if not output_file_name:
        output_file_name = 'clip_results'

    # Create the temporary workspace if clip_feature_class:
    # NOTE(review): the trailing "if clip_feature_class:" above appears to be
    # a leftover in the original comment (clip_feature_class is not defined
    # anywhere visible) — verify against upstream source.
    out_workspace = os.path.join(request['folder'], 'temp')
    if not os.path.exists(out_workspace):
        os.makedirs(out_workspace)

    # Set the output coordinate system.
    if not out_coordinate_system == 0:  # Same as Input
        out_sr = task_utils.get_spatial_reference(out_coordinate_system)
        arcpy.env.outputCoordinateSystem = out_sr

    # Set the output workspace.
    status_writer.send_status(_('Setting the output workspace...'))
    if not out_format == 'SHP':
        # Non-shapefile output goes into a file geodatabase inside temp.
        out_workspace = arcpy.CreateFileGDB_management(out_workspace, 'output.gdb').getOutput(0)
    arcpy.env.workspace = out_workspace

    # Query the index for results in groups of 25.
    headers = {'x-access-token': task_utils.get_security_token(request['owner'])}
    result_count, response_index = task_utils.get_result_count(parameters)
    query_index = task_utils.QueryIndex(parameters[response_index])
    fl = query_index.fl

    # Get the Clip features by id.
    # NOTE: `id` shadows the builtin; kept to preserve original code.
    id = clip_features['id']
    clip_query = '{0}{1}{2}'.format(sys.argv[2].split('=')[1], '/select?&wt=json', "&fl=id,path,fullpath:[absolute],absolute_path:[absolute],[lyrFile],[geo]&q=id:{0}".format(id))
    clip_result = requests.get(clip_query, verify=verify_ssl, headers=headers)
    clipper = clip_result.json()['response']['docs'][0]
    # Resolve the clip document to something arcpy can use, in priority order:
    # local absolute path -> layer file -> GeoJSON geometry -> s3 download ->
    # bounding box polygon.
    if 'absolute_path' in clipper and not clipper['absolute_path'].startswith('s3'):
        clip_features = clipper['absolute_path']
    elif '[lyrFile]' in clipper:
        clip_features = clipper['[lyrFile]']
    elif '[geo]' in clipper:
        # Geometry arrives as GeoJSON; project to WGS84 (EPSG:4326).
        clip_features = arcpy.AsShape(clipper['[geo]']).projectAs(arcpy.SpatialReference(4326))
    elif 'absolute_path' in clipper and clipper['absolute_path'].startswith('s3'):
        base_name = os.path.basename(clipper['path'])
        temp_folder = tempfile.mkdtemp()
        if '[downloadURL]' in clipper:
            download = os.path.join(temp_folder, os.path.basename(clipper['[downloadURL]']))
            response = requests.get(clipper['[downloadURL]'], verify=verify_ssl)
            with open(download, 'wb') as fp:
                fp.write(response.content)
            if download.endswith('.zip'):
                # NOTE: `zip` shadows the builtin; kept from original.
                zip = zipfile.ZipFile(download)
                zip.extractall(temp_folder)
                clip_features = os.path.join(temp_folder, base_name)
            else:
                clip_features = download
    else:
        # Fall back to the document's bounding box as the clip polygon.
        bbox = clipper['bbox'].split()
        extent = arcpy.Extent(*bbox)
        pt_array = arcpy.Array([extent.lowerLeft, extent.upperLeft, extent.upperRight, extent.lowerRight])
        clip_features = arcpy.Polygon(pt_array, 4326)

    query = '{0}{1}{2}'.format(sys.argv[2].split('=')[1], '/select?&wt=json', fl)
    fq = query_index.get_fq()
    if fq:
        groups = task_utils.grouper(range(0, result_count), task_utils.CHUNK_SIZE, '')
        query += fq
    elif 'ids' in parameters[response_index]:
        groups = task_utils.grouper(list(parameters[response_index]['ids']), task_utils.CHUNK_SIZE, '')
    else:
        groups = task_utils.grouper(range(0, result_count), task_utils.CHUNK_SIZE, '')

    # Begin processing
    status_writer.send_percent(0.0, _('Starting to process...'), 'clip_data')
    for group in groups:
        if fq:
            results = requests.get(query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]), verify=verify_ssl, headers=headers)
        elif 'ids' in parameters[response_index]:
            results = requests.get(query + '{0}&ids={1}'.format(fl, ','.join(group)), verify=verify_ssl, headers=headers)
        else:
            results = requests.get(query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]), verify=verify_ssl, headers=headers)

        docs = results.json()['response']['docs']
        input_items = task_utils.get_input_items(docs)
        if not input_items:
            input_items = task_utils.get_input_items(parameters[response_index]['response']['docs'])

        # Docs without a 'path' are row-level results grouped by name.
        input_rows = collections.defaultdict(list)
        for doc in docs:
            if 'path' not in doc:
                input_rows[doc['name']].append(doc)
        if input_rows:
            result = clip_data(input_rows, out_workspace, clip_features, out_format)
            clipped += result[0]
            errors += result[1]
            skipped += result[2]
        if input_items:
            result = clip_data(input_items, out_workspace, clip_features, out_format)
            clipped += result[0]
            errors += result[1]
            skipped += result[2]
        if not input_items and not input_rows:
            status_writer.send_state(status.STAT_FAILED, _('No items to process. Check if items exist.'))
            return

    if arcpy.env.workspace.endswith('.gdb'):
        out_workspace = os.path.dirname(arcpy.env.workspace)

    if clipped > 0:
        try:
            if out_format == 'MPK':
                mxd_template = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'supportfiles', 'MapTemplate.mxd')
                mxd = task_utils.create_mxd(out_workspace, mxd_template, 'output')
                status_writer.send_status(_("Packaging results..."))
                task_utils.create_mpk(out_workspace, mxd, files_to_package)
                shutil.move(os.path.join(out_workspace, 'output.mpk'), os.path.join(os.path.dirname(out_workspace), '{0}.mpk'.format(output_file_name)))
            elif out_format == 'LPK':
                status_writer.send_status(_("Packaging results..."))
                task_utils.create_lpk(out_workspace, output_file_name, files_to_package)
            elif out_format == 'KML':
                task_utils.convert_to_kml(os.path.join(out_workspace, "output.gdb"))
                arcpy.env.workspace = ''
                arcpy.RefreshCatalog(os.path.join(out_workspace, "output.gdb"))
                try:
                    # Best-effort delete of the staging gdb; a lock here is
                    # non-fatal since only the KML is shipped.
                    arcpy.Delete_management(os.path.join(out_workspace, "output.gdb"))
                except arcpy.ExecuteError:
                    pass
                zip_file = task_utils.zip_data(out_workspace, '{0}.zip'.format(output_file_name))
                shutil.move(zip_file, os.path.join(os.path.dirname(out_workspace), os.path.basename(zip_file)))
            else:
                if create_mxd:
                    mxd_template = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'supportfiles', 'MapTemplate.mxd')
                    task_utils.create_mxd(out_workspace, mxd_template, 'output')
                zip_file = task_utils.zip_data(out_workspace, '{0}.zip'.format(output_file_name))
                shutil.move(zip_file, os.path.join(os.path.dirname(out_workspace), os.path.basename(zip_file)))
        except arcpy.ExecuteError as ee:
            status_writer.send_state(status.STAT_FAILED, _(ee))
            sys.exit(1)
    else:
        status_writer.send_state(status.STAT_FAILED, _('No output created. Zero inputs were clipped.'))

    # Update state if necessary.
    if errors > 0 or skipped > 0:
        status_writer.send_state(status.STAT_WARNING, _('{0} results could not be processed').format(errors + skipped))
    task_utils.report(os.path.join(request['folder'], '__report.json'), clipped, skipped, errors, errors_reasons, skipped_reasons)
def clip():
    """Clip the burnt/walled NED elevation raster to buffered HUC8 boundaries.

    Projects the WBD_HU8 polygons to USA Contiguous Albers (102039), splits
    them into one feature class per HUC8, buffers each by 5000 m, and clips
    the elevation raster to each buffer, writing one GeoTIFF per HUC8 to
    ``outfolder\\huc8clips<subregion>``.

    Relies on module globals not visible in this block: ``nhd`` (NHD file
    geodatabase path), ``burnt_ned`` (input elevation raster), ``outfolder``
    and ``nhdsubregion`` — TODO confirm where these are defined.
    """
    arcpy.env.workspace = nhd
    arcpy.RefreshCatalog(nhd)
    arcpy.ResetEnvironments()
    # Burnt and walled mosaiced elevation
    raster = burnt_ned
    # Create a feature dataset in NHD file geodatabase named "HUC8_Albers" in Albers projection
    # NOTE(review): `workspace` below is assigned but never used — candidate
    # for removal after confirming no side effect is relied upon.
    workspace = arcpy.ListWorkspaces("*", "FileGDB")
    sr = arcpy.SpatialReference()
    sr.factoryCode = 102039  # USA Contiguous Albers Equal Area Conic USGS
    sr.create()
    arcpy.env.outputCoordinateSystem = sr
    arcpy.env.compression = "None"
    arcpy.env.pyramid = "NONE"
    arcpy.CreateFeatureDataset_management(arcpy.env.workspace, "HUC8_Albers", sr)

    # HUC8 polygon selected automaticly from input workspace
    inhuc8 = "WBD_HU8"
    inhuc8albers = "WBD_HU8_Albers"

    # Project WBD_HU8 to Albers (source is NAD83 geographic, EPSG:4269)
    srin = arcpy.SpatialReference()
    srin.factoryCode = 4269
    srin.create()
    arcpy.Project_management(inhuc8, "HUC8_Albers\WBD_HU8_Albers", sr, '', srin)

    # Output goes to feature dataset HUC8_Albers
    outfd = "HUC8_Albers"

    # Splits HUC8 into individual feature classes for each polygon
    arcpy.AddField_management("WBD_HU8_Albers", "Label", "TEXT")
    arcpy.RefreshCatalog(nhd)
    # Label each polygon "HUC<HUC_8 code>"; used as the split key below.
    calcexp = '"HUC" + !HUC_8!'
    arcpy.CalculateField_management("WBD_HU8_Albers", "Label", calcexp, "PYTHON")
    if not os.path.exists(os.path.join(outfolder, "cliptemp")):
        os.mkdir(os.path.join(outfolder, "cliptemp"))
    cliptemp = os.path.join(outfolder, "cliptemp")
    # Split_analysis needs a shapefile copy; cliptemp is removed afterwards.
    arcpy.FeatureClassToShapefile_conversion("WBD_HU8_Albers", cliptemp)
    wbdshp = os.path.join(cliptemp, "WBD_HU8_Albers.shp")
    arcpy.Split_analysis(wbdshp, wbdshp, "Label", outfd, '')
    shutil.rmtree(cliptemp)

    # Buffer HUC8 feature classes by 5000m
    fcs = arcpy.ListFeatureClasses("", "Polygon", "HUC8_Albers")
    for fc in fcs:
        arcpy.Buffer_analysis(fc, outfd + "\\" + fc + "_buffer", "5000 meters")
    arcpy.RefreshCatalog(nhd)
    arcpy.ResetEnvironments()

    # Clips rasters
    fcs = arcpy.ListFeatureClasses("*_buffer", "Polygon", "HUC8_Albers")
    for fc in fcs:
        arcpy.env.compression = "None"
        arcpy.env.pyramid = "NONE"
        # fc is named "HUC<8-digit code>_buffer"; chars 3:11 are the code.
        fcshort = fc[3:11]
        arcpy.Clip_management(raster, '', outfolder + "\\" + "huc8clips" + nhdsubregion + "\\" + "NED" + fcshort + ".tif", fc, "0", "ClippingGeometry")
    return
def main(argv=None):
    """Import CAD drawings modified since ``since`` into the gizinta geodatabase.

    Creates/compresses the target workspace, then for every CAD file newer
    than ``since`` loads each dataset described by the ``datasets`` XML nodes
    (import into an existing target, or export to create it). Finally sets
    the SUCCESS script parameter.

    Relies on module-level globals: since, cadFolder, cadExt, datasets,
    SUCCESS, and the gzSupport helper module.
    """
    success = True
    if not arcpy.Exists(gzSupport.workspace):
        gzSupport.addMessage(gzSupport.workspace +
                             " does not exist, attempting to create")
        gzSupport.createGizintaGeodatabase()
    else:
        gzSupport.compressGDB(gzSupport.workspace)
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        gzSupport.addMessage("Looking for drawings modified since " + since)
        minTime = dt.datetime.strptime(since, "%d/%m/%Y %I:%M:%S %p")
        cadFiles = getFileList(cadFolder, cadExt, minTime)
        if cadFiles:
            progBar = len(cadFiles) + 1
            arcpy.SetProgressor("step", "Importing Drawings...", 0, progBar, 1)
            deleteExistingRows(datasets)
            arcpy.SetProgressorPosition()
            for item in cadFiles:
                cadPath = item[0]
                cadName = item[1]
                gzSupport.addMessage("Importing Drawing " + cadName)
                for dataset in datasets:
                    # prefer sourceName, fall back to name
                    try:
                        name = dataset.getAttributeNode("sourceName").nodeValue
                    except:
                        name = dataset.getAttributeNode("name").nodeValue
                    gzSupport.sourceIDField = dataset.getAttributeNode(
                        "sourceIDField").nodeValue
                    arcpy.SetProgressorLabel("Loading " + name + " for " +
                                             cadName + "...")
                    # BUG FIX: env property is lowercase 'workspace'; the
                    # original assigned 'Workspace', which does not set the
                    # geoprocessing environment.
                    arcpy.env.workspace = gzSupport.workspace
                    targetName = dataset.getAttributeNode("targetName").nodeValue
                    sourceWorkspace = os.path.join(cadPath, cadName)
                    target = os.path.join(gzSupport.workspace, targetName)
                    # export creates a missing target; import appends to it
                    if not arcpy.Exists(target):
                        gzSupport.addMessage(target + " does not exist")
                        mode = "export"
                    else:
                        mode = "import"
                    retVal = False  # defined even if the call below blows up
                    try:
                        if mode == "import":
                            retVal = gzSupport.importDataset(
                                sourceWorkspace, name, targetName, dataset)
                        elif mode == "export":
                            retVal = gzSupport.exportDataset(
                                sourceWorkspace, name, targetName, dataset)
                        if retVal == False:
                            success = False
                    except:
                        gzSupport.showTraceback()
                        success = False
                        retVal = False
                    arcpy.env.workspace = gzSupport.workspace
                    gzSupport.logDatasetProcess(cadName, name, retVal)
                gzSupport.cleanupGarbage()
                arcpy.SetProgressorPosition()
    except:
        gzSupport.addError("A Fatal Error occurred")
        gzSupport.showTraceback()
        success = False
        gzSupport.logDatasetProcess("", "", False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
        gzSupport.cleanupGarbage()
    if not success:
        gzSupport.addError(
            "Errors occurred during process, look in log files for more information"
        )
    if gzSupport.ignoreErrors:
        success = True
    gzSupport.closeLog()
    arcpy.SetParameter(SUCCESS, success)
)
# NOTE(review): the lone ')' above closes a call that begins before this
# excerpt — confirm against the full file. The statements below compute, for
# each zone polygon, the hectares and percent of area falling inside a mask.
mem = "in_memory"
arcpy.env.workspace = mem
arcpy.AddMessage("Dissolving mask...")
arcpy.Dissolve_management(maskpoly, os.path.join(mem, "mask"))
mask = "mask"
arcpy.AddMessage("Clipping zones...")
arcpy.Clip_analysis(zones, mask, os.path.join(mem, "clip"))
clip = "clip"
arcpy.AddMessage("Adding Hectare fields...")
# best-effort AddField: failure (field already exists) is deliberately ignored
try:
    arcpy.AddField_management(clip, "Ha_In_" + suffix, "DOUBLE")
except:
    pass
try:
    arcpy.AddField_management(zones, "Ha", "DOUBLE")
except:
    pass
# geometry-based hectare calculation for both the clipped zones and originals
exp = '!shape.area@hectares!'
arcpy.CalculateField_management(clip, "Ha_In_" + suffix, exp, "PYTHON")
arcpy.CalculateField_management(zones, "Ha", exp, "PYTHON")
# bring the in-mask hectares back onto the full zones table
arcpy.JoinField_management(zones, idfield, clip, idfield, "Ha_In_" + suffix)
arcpy.AddMessage("Calculating percentages...")
arcpy.RefreshCatalog(zones)
arcpy.RefreshCatalog(mem)
# percent of each zone lying inside the mask
hain = "Ha_In_" + suffix
pct = '!%s! / !Ha! * 100' % (hain)
# NOTE(review): "Pct_In_" + suffix is never added with AddField in this
# excerpt — presumably created elsewhere; verify before relying on this step.
arcpy.CalculateField_management(zones, "Pct_In_" + suffix, pct, "PYTHON")
arcpy.AddMessage("Finished.")
            workspace, outname, runoff_m3,
            str(id_day))  # writing the runoff into the result table
        # NOTE(review): fragment of a larger script — the call closed above and
        # the enclosing rp/c/day loops begin before this excerpt; the
        # indentation here is reconstructed and should be checked.
        # deletes the calculated rasters above if selected
        if not id_yesterday == start:
            delete_raster(check_pet, check_aet, check_p, check_r, check_s,
                          parameter_day, id_yesterday)
        # memory for the id from the previous day; necessary to delete the
        # rasterdatasets
        id_yesterday = id_day
        arcpy.AddMessage(
            time.strftime("%H:%M:%S: ") +
            "Fertig mit der Berechnung des {0}.{1}.{2}".format(
                day, month, year))
    del cursor

    # deleting the rasters of the first day of the current variable combination
    delete_raster(check_pet, check_aet, check_p, check_r, check_s,
                  parameter_day, start)

    # deleting the rasters of the last day of the previous variable combination
    if not parameter_yesterday == parameter_day:
        delete_raster(check_pet, check_aet, check_p, check_r, check_s,
                      parameter_yesterday, end)
    # memory for the value of the last combination of variables
    parameter_yesterday = parameter_day

    arcpy.AddMessage(
        time.strftime("%H:%M:%S: ") + "Fertig mit c={}".format(c[y]))
arcpy.AddMessage(
    time.strftime("%H:%M:%S: ") + "Fertig mit rp={}".format(rp_factor[z]))

# deleting the rasters of the last day of the last variable combination
delete_raster(check_pet, check_aet, check_p, check_r, check_s,
              parameter_day, end)
arcpy.RefreshCatalog(workspace)
arcpy.AddMessage(time.strftime("%H:%M:%S: ") + "Modellierung abgeschlossen.")
def getFieldValues(mode, fields, datasets):
    """Collect field values from each dataset's table (Python 2 code).

    mode     -- "UNIQUE": theValues keeps first occurrences, theDiff repeats;
                "ALL": theValues keeps every value read.
    fields   -- field names to read from each row (skipped if not present).
    datasets -- XML nodes whose "name" attribute names a table in `workspace`.

    NOTE(review): no return statement is visible in this excerpt; the tail of
    the function (presumably returning theValues/theDiff) may be truncated.
    """
    # get a list of field values, returns all values and the unique values.
    theValues = []  # unique list of values
    theDiff = []  # all values
    for dataset in datasets:
        name = dataset.getAttributeNode("name").nodeValue
        table = os.path.join(workspace, name)
        desc = arcpy.Describe(table)  # NOTE(review): unused in this excerpt
        try:
            # old-style (pre-da) cursor: .next() returns None past the last row
            cursor = arcpy.SearchCursor(table)
            row = cursor.next()
        except Exception, ErrorDesc:
            printMsg("Unable to read the Dataset, Python error is: ")
            msg = str(getTraceback(Exception, ErrorDesc))
            printMsg(msg[msg.find("Error Info:"):])
            row = None
        numFeat = int(arcpy.GetCount_management(table).getOutput(0))
        addMessageLocal(table + ", " + str(numFeat) + " (get " + mode +
                        ") features")
        # update the progressor roughly every 1% of rows (Py2 int division)
        progressUpdate = 1
        i = 0
        if numFeat > 100:
            progressUpdate = numFeat / 100
        arcpy.SetProgressor("Step", "Getting " + mode + " values...", 0,
                            numFeat, progressUpdate)
        attrs = [f.name for f in arcpy.ListFields(table)]
        if row is not None:
            while row:
                i += 1
                if i % progressUpdate == 0:
                    arcpy.SetProgressorPosition(i)
                try:
                    for field in fields:
                        if field in attrs:
                            currentValue = row.getValue(field)
                            if mode.upper() == "UNIQUE":
                                if currentValue != None:
                                    # EAFP membership check: index() raises
                                    # when the value has not been seen yet
                                    try:
                                        theValues.index(
                                            currentValue
                                        )  # if the current value is present
                                        theDiff.append(
                                            currentValue
                                        )  # add to the list of differences if it is found
                                    except:
                                        theValues.append(
                                            currentValue
                                        )  # else add the value if the first check fails.
                            elif mode.upper() == "ALL":
                                theValues.append(currentValue)
                except:
                    err = "Exception caught: unable to get field values"
                    addError(err)
                    logProcessError(row.getValue(field), sourceIDField,
                                    row.getValue(sourceIDField),
                                    "Cannot read", err)
                    theValues = []  # discard partial results on error
                row = cursor.next()
        # NOTE(review): raises NameError if the cursor failed to open above —
        # placement relative to the `if` reconstructed from collapsed source.
        del cursor
        arcpy.RefreshCatalog(table)
        arcpy.Delete_management(DEMtemp)
    except:
        pass
    # NOTE(review): fragment of a larger script — the enclosing try blocks
    # begin before this excerpt; indentation reconstructed.

    # ------------------------------------------------------------------------------------------------ Compact FGDB
    try:
        arcpy.Compact_management(watershedGDB_path)
        AddMsgAndPrint(
            "\nSuccessfully Compacted FGDB: " +
            os.path.basename(watershedGDB_path), 0)
    except:
        # compaction is best-effort; failure is not fatal
        pass

    # ------------------------------------------------------------------------------------------------ FIN!
    AddMsgAndPrint("\nProcessing Complete!\n", 0)

    # -------------------------------------------------------------------- Cleanup
    arcpy.RefreshCatalog(userWorkspace)

    # Restore environment settings saved at the start of the script
    arcpy.env.extent = tempExtent
    arcpy.env.mask = tempMask
    arcpy.env.snapRaster = tempSnapRaster
    arcpy.env.outputCoordinateSystem = tempCoordSys

# -----------------------------------------------------------------------------------------------------------------
except SystemExit:
    pass
except KeyboardInterrupt:
    AddMsgAndPrint("Interruption requested....exiting")
        for df in arcpy.mapping.ListDataFrames(mxd):
            for lyr in arcpy.mapping.ListLayers(mxd, "", df):
                if lyr.name == AOIname:
                    # drop any stale copy of the AOI layer from the map
                    arcpy.mapping.RemoveLayer(df, lyr)
        del mxd
    # NOTE(review): fragment of a larger script — the enclosing try (and
    # possibly an `if`) begins before this excerpt; indentation reconstructed.

    # Prep for proper layer file labels importing as determined by slope type selected to be run
    if slopeType == "PERCENT_RISE":
        arcpy.SetParameterAsText(5, projectAOI)
    else:
        arcpy.SetParameterAsText(6, projectAOI)

    AddMsgAndPrint("\nAdding " + str(aoiOut) + " to ArcMap", 0)
    AddMsgAndPrint("\n", 0)

    # ------------------------------------------------------------------------------------------------ Cleanup
    arcpy.RefreshCatalog(watershedGDB_path)

    ## # Restore original environments
    ## gp.extent = tempExtent
    ## gp.mask = tempMask
    ## gp.SnapRaster = tempSnapRaster
    ## gp.CellSize = tempCellSize
    ## gp.OutputCoordinateSystem = tempCoordSys

except SystemExit:
    pass
except KeyboardInterrupt:
    AddMsgAndPrint("Interruption requested....exiting")
# NOTE(review): the handler body of this final `except:` continues beyond
# this excerpt (truncated here).
except:
# Call SDM Export script aliasName = tileName + " " + str(theTile) bExported = SSURGO_Convert_to_Geodatabase.gSSURGO( inputFolder, surveyList, outputWS, theAOI, (aliasName, aliasName), useTextFiles) if bExported: exportList.append(os.path.basename(outputWS)) else: err = "gSSURGO export failed for " + fldName + " value: " + str( theTile) raise MyError, err # end of for loop arcpy.RefreshCatalog(outputFolder) del outputFolder PrintMsg( " \nFinished creating the following gSSURGO databases: " + ", ".join(exportList) + " \n ", 0) except MyError, e: # Example: raise MyError, "This is an error message" PrintMsg(str(e) + " \n", 2) except: errorMsg()
        if sys.argv[8] == 'true':
            addLTYPE = True
        else:
            addLTYPE = False
    except:
        # optional argv[8] missing: default to False
        addLTYPE = False
    # NOTE(review): fragment of a larger script — the `try:` above and the
    # `if` matched by the final `else:` begin before this excerpt;
    # indentation reconstructed.
    try:
        if sys.argv[9] == 'true':
            addConfs = True
        else:
            addConfs = False
    except:
        # optional argv[9] missing: default to False
        addConfs = False

    # create personal gdb in output directory and run main routine
    if createDatabase(outputDir, thisDB):
        thisDB = outputDir + '/' + thisDB
        arcpy.RefreshCatalog(thisDB)
        main(thisDB, coordSystem, nCrossSections)

    # try to write a readme within the .gdb
    if thisDB[-4:] == '.gdb':
        try:
            writeLogfile(thisDB, 'Geodatabase created by ' + versionString)
        except:
            addMsgAndPrint('Failed to write to' + thisDB + '/00log.txt')
else:
    # wrong/missing arguments: print usage text
    addMsgAndPrint(usage)
    return frame
# NOTE(review): `return frame` above is the tail of a function (likely
# makeFrame) whose def begins before this excerpt.


# Main
# Create new empty polygon feature class
msg = "\nCreating {} feature class...".format(ddpindex)
arcpy.AddMessage(msg)
arcpy.CreateFeatureclass_management(outgdb, ddpindex, "POLYGON", "",
                                    "DISABLED", "DISABLED",
                                    df.spatialReference)
msg = "\n{} feature class created".format(ddpindex)
arcpy.AddMessage(msg)
arcpy.RefreshCatalog(fpath)

# Create a polygon (rectangle) from the current map extent
newframe = makeFrame(df)

# Add the polygon to the new feature class
arcpy.CopyFeatures_management(newframe, fpath)

# Add the fields to the new feature class
# (fdict maps field name -> field type; both defined earlier in the script)
for f in fields:
    arcpy.AddField_management(fpath, f, fdict[f])

# Create a feature layer from the feature class and add it to the map TOC
layer = arcpy.mapping.Layer(fpath)
arcpy.mapping.AddLayer(df, layer, "TOP")