def doSimplify(input):
    """Simplify one polygon feature class into the global To_GDB workspace.

    Skips the work entirely when the output already exists, so the batch
    run is restartable. (Python 2 caller context; `input` name kept for
    interface compatibility even though it shadows the builtin.)
    """
    # Output keeps the input's base name, relocated into To_GDB.
    target = os.path.join(To_GDB, os.path.basename(input))
    print("{} => {}".format(input, target))
    if arcpy.Exists(target):
        return
    arcpy.SimplifyPolygon_cartography(input, target, "POINT_REMOVE",
                                      "10 Meters", "0 SquareMeters",
                                      "NO_CHECK", "NO_KEEP")
def onClick(self):
    """Vectorize the cleaned raster, simplify the shoreline polygons, and
    remap their gridcode values to the real class codes.

    NOTE(review): reconstructed from a single collapsed line — nesting of
    the final else/pass is assumed to belong to the `if row[0] > 0` test;
    confirm against the original add-in source.
    """
    print "Vectorisation de la couche matricielle..."
    # Process: Raster to Polygon
    arcpy.RasterToPolygon_conversion("cleaned_raster", "vectorized_raster",
                                     "NO_SIMPLIFY", "Value")
    # Process: Simplify Polygon
    arcpy.SimplifyPolygon_cartography("vectorized_raster", "simplified_shoreline",
                                      "BEND_SIMPLIFY", "4 Meters",
                                      "10 SquareMeters", "NO_CHECK", "NO_KEEP")
    # Completion dialog (message text re-encoded for the Windows cp1252 UI).
    pythonaddins.MessageBox(
        "Vectorisation et simplification de la couche matricielle terminé!".decode('utf-8').encode('cp1252'),
        "Vectorisation", "0")
    # Target class codes: [code, label]; labels are matched against the
    # labels carried in the module-level Ref_TS_Classes table.
    real_classes_gc = [[1, u'Vegetation saine'], [2, u'Vegetation fletrie'],
                       [3, u'Sable sec'], [4, u'Sable humide'],
                       [5, u'Laisse de mer']]
    # Rewrite each polygon's gridcode: look up its label in Ref_TS_Classes,
    # then translate that label into the real class code.
    with arcpy.da.UpdateCursor("simplified_shoreline", "gridcode") as gcc_cursor:
        for row in gcc_cursor:
            if row[0] > 0:
                for i in Ref_TS_Classes:
                    for j in real_classes_gc:
                        if row[0] == i[0]:
                            temp = i[1]
                            if j[1] == temp:
                                row[0] = j[0]
                                gcc_cursor.updateRow(row)
            else:
                pass
def LoDGenerator(inputPath, outputPath, simplificationValue):
    """Generate one level-of-detail (LoD) simplification of inputPath.

    Returns the output feature class name produced by nameModify().
    `outputPath` is accepted for interface compatibility but is not used,
    matching the original implementation.
    """
    startedAt = datetime.now()
    sv = str(simplificationValue)
    logger.info("Generate the LoD " + sv)
    # Output name is tagged with the simplification value, e.g. "<name>_10m".
    lodFile = nameModify(inputPath, sv + "m")
    # Minimum area heuristic: (tolerance^2)/4 square meters.
    arcpy.SimplifyPolygon_cartography(
        inputPath,
        lodFile,
        "POINT_REMOVE",
        sv + " Meters",
        str((simplificationValue * simplificationValue) / 4) + " SquareMeters",
        "RESOLVE_ERRORS",
        "NO_KEEP",
        "")
    arcpyLogger(arcpyVerboseLevel)
    logger.info("The " + sv + "m LoD file, generated in " +
                str(duration(startedAt)) + " is : " + lodFile)
    return lodFile
def Wygladzenie_lini_pow():
    """Smooth/generalize river lines and forest polygons.

    Dissolves the stream layer into one feature, bend-simplifies the
    resulting river lines, then bend-simplifies the forest polygons.
    Fix: Windows paths are now raw strings — the originals relied on
    sequences like "\M" not being escapes, which is a SyntaxWarning in
    Python 3 and an error in 3.12+. Runtime path values are unchanged.
    """
    # Merge into a single feature so the following tool works on one input.
    arcpy.Dissolve_management("Rzeka_strum_L", r"D:\mgr\M33033CD\Rzeka.shp")
    arcpy.SimplifyLine_cartography(r"D:\mgr\M33033CD\Rzeka.shp",
                                   r"D:\mgr\M33033CD\Rzeka_L.shp",
                                   "BEND_SIMPLIFY", 200)
    arcpy.SimplifyPolygon_cartography("Las_A", r"D:\mgr\M33033CD\Las.shp",
                                      "BEND_SIMPLIFY", 200)
def merge_polygon_simplify(merged_masks, datadir, iso):
    """Bend-simplify the merged TCD mask polygons for one country.

    Returns the path of the simplified feature class
    (<datadir>/<iso>_tcd_merged_mask_simp).
    """
    out_fc = os.path.join(datadir, "{}_tcd_merged_mask_simp".format(iso))
    # 1 m tolerance, dropping simplified parts under 100 hectares.
    arcpy.SimplifyPolygon_cartography(merged_masks, out_fc,
                                      "BEND_SIMPLIFY", "1 Meters", "100 Hectares")
    return out_fc
def simplifyPolygons(sourceDataset):
    """Densify then point-remove simplify sourceDataset.

    Simplify polygons using the approach developed by Chris Bus.
    Returns the path of the simplified copy ('<source>_simplified').
    """
    dla.addMessage("Simplifying (densifying) Geometry")
    arcpy.Densify_edit(sourceDataset)
    simplified = sourceDataset + '_simplified'
    # Clear leftovers from a previous run, including the tool's _Pnt output.
    for leftover in (simplified, simplified + '_Pnt'):
        if arcpy.Exists(leftover):
            arcpy.Delete_management(leftover)
    arcpy.SimplifyPolygon_cartography(sourceDataset, simplified,
                                      "POINT_REMOVE", "1 Meters")
    return simplified
def prep_source_fc(self):
    """Replace self.data_source with a point-remove simplified copy of itself."""
    logging.debug("Starting simplify_polygon")
    simplified = self.data_source + '_simplified'
    arcpy.SimplifyPolygon_cartography(self.data_source,
                                      simplified,
                                      algorithm="POINT_REMOVE",
                                      tolerance="10 Meters",
                                      minimum_area="0 Unknown",
                                      error_option="NO_CHECK",
                                      collapsed_point_option="NO_KEEP")
    # Downstream steps now operate on the simplified feature class.
    self.data_source = simplified
def Step3SimplifyPolygon(Simplification_Tolerance, Input_Barrier_Layers,
                         Output_Aggregate, Output_Simplify,
                         Simplification_Algorithm="POINT_REMOVE",
                         Minimum_Area_2_="0 Unknown",
                         Handling_Topological_Errors="RESOLVE_ERRORS",
                         Keep_collapsed_points=True):
    """Step3SimplifyPolygon — ModelBuilder-exported wrapper around
    Simplify Polygon, run against the Step-2 aggregate output.
    """
    # Exported default preserved: existing outputs are NOT overwritten.
    # To allow overwriting outputs change overwriteOutput option to True.
    arcpy.env.overwriteOutput = False
    # Process: Simplify Polygon (Simplify Polygon)
    result = arcpy.SimplifyPolygon_cartography(
        in_features=Output_Aggregate,
        out_feature_class=Output_Simplify,
        algorithm=Simplification_Algorithm,
        tolerance=Simplification_Tolerance,
        minimum_area=Minimum_Area_2_,
        error_option=Handling_Topological_Errors,
        collapsed_point_option=Keep_collapsed_points,
        in_barriers=Input_Barrier_Layers)
    # First derived output is the simplified feature class path
    # (kept for parity with the ModelBuilder export; not returned).
    output_feature_class_Pnt = result[0]
def better_contour(inputclass, outputclass):
    """Merge inputs, dissolve, drop small contained parts, and write a
    point-remove simplified contour to outputclass.

    Intermediate results live in in_memory and are deleted as soon as the
    next stage has consumed them.
    """
    print("inputclass", inputclass)
    print("outputclass", outputclass)
    merged = "in_memory/after_merge"
    dissolved = "in_memory/after_diss_all"
    eliminated = "in_memory/after_eli"
    arcpy.Merge_management(inputclass, merged)
    merger_all(merged, dissolved)
    arcpy.Delete_management(merged)
    print("merge all")
    # Remove fully-contained holes smaller than 1,000,000 (map units^2).
    arcpy.EliminatePolygonPart_management(dissolved, eliminated,
                                          "AREA", 1000000,
                                          part_option="CONTAINED_ONLY")
    arcpy.Delete_management(dissolved)
    print("create contour")
    arcpy.SimplifyPolygon_cartography(eliminated, outputclass,
                                      algorithm="POINT_REMOVE",
                                      tolerance=1,
                                      error_option="NO_CHECK",
                                      collapsed_point_option="NO_KEEP")
    print("complete")
### Initialization — stdlib and third-party libraries
import arcpy
import toml
import sys

### Preliminary processing — read tool parameters from TOML config files
with open("code/parameters/general.toml") as fh:
    general_params = toml.loads(fh.read())
with open("code/parameters/simplify.toml") as fh:
    simplify_params = toml.loads(fh.read())

# Configure the geoprocessing environment.
arcpy.env.parallelProcessingFactor = general_params['threads']
arcpy.env.overwriteOutput = True

### Main processing — simplify argv[1] into argv[2] using configured options
arcpy.SimplifyPolygon_cartography(
    sys.argv[1],
    sys.argv[2],
    simplify_params['algorithm'],
    simplify_params['tolerance'],
    simplify_params['minimum_area'],
    simplify_params['error_option'],
    simplify_params['collapsed_point_option'])
def merge_polygon_simplify(merged_masks, datadir, iso):
    """Bend-simplify merged masks into <datadir>/<iso>_tcd_mask.shp.

    Returns the output shapefile path.
    """
    out_shp = os.path.join(datadir, "{}_tcd_mask.shp".format(iso))
    # Coarse 500 m tolerance, dropping parts under 100 hectares.
    arcpy.SimplifyPolygon_cartography(merged_masks, out_shp,
                                      "BEND_SIMPLIFY", "500 Meters", "100 Hectares")
    return out_shp
def createVectorBoundaryC(f_path, f_name, raster_props, stat_out_folder,
                          vector_bound_path, minZ, maxZ, bound_path, elev_type):
    """Build the vector boundary footprint for one raster.

    Pipeline: raster domain -> singlepart -> drop huge contained parts ->
    point-remove simplify -> dissolve -> attribute with raster properties ->
    clip to bound_path -> write vector_bound_path. Intermediates B1..B5 in
    stat_out_folder are deleted at the end.

    NOTE(review): reconstructed from collapsed lines; statement order is
    preserved token-for-token, indentation inferred.
    """
    a = datetime.now()
    arcpy.AddMessage("\tCreating {} bound for '{}' using min z '{}' and max z'{}'".format(elev_type, f_path, minZ, maxZ))
    # Numbered intermediates, consumed in descending order (B5 -> B1).
    vector_1_bound_path = os.path.join(stat_out_folder, "B1_{}.shp".format(f_name))
    vector_2_bound_path = os.path.join(stat_out_folder, "B2_{}.shp".format(f_name))
    vector_3_bound_path = os.path.join(stat_out_folder, "B3_{}.shp".format(f_name))
    vector_4_bound_path = os.path.join(stat_out_folder, "B4_{}.shp".format(f_name))
    vector_5_bound_path = os.path.join(stat_out_folder, "B5_{}.shp".format(f_name))
    # Clear leftovers from any previous run.
    deleteFileIfExists(vector_bound_path, useArcpy=True)
    deleteFileIfExists(vector_1_bound_path, useArcpy=True)
    deleteFileIfExists(vector_2_bound_path, useArcpy=True)
    deleteFileIfExists(vector_3_bound_path, useArcpy=True)
    deleteFileIfExists(vector_4_bound_path, useArcpy=True)
    deleteFileIfExists(vector_5_bound_path, useArcpy=True)
    # B5: polygon footprint of the raster's data domain.
    arcpy.RasterDomain_3d(in_raster=f_path, out_feature_class=vector_5_bound_path,
                          out_geometry_type="POLYGON")
    Utility.addToolMessages()
    # B4: explode multipart footprints.
    arcpy.MultipartToSinglepart_management(in_features=vector_5_bound_path,
                                           out_feature_class=vector_4_bound_path)
    Utility.addToolMessages()
    checkRecordCount(vector_4_bound_path)
    # B3: remove enormous contained parts (holes) up to 10,000 sq miles.
    arcpy.EliminatePolygonPart_management(in_features=vector_4_bound_path,
                                          out_feature_class=vector_3_bound_path,
                                          condition="AREA",
                                          part_area="10000 SquareMiles",
                                          part_area_percent="0",
                                          part_option="CONTAINED_ONLY")
    Utility.addToolMessages()
    checkRecordCount(vector_3_bound_path)
    # B2: point-remove simplify at the module-level C_SIMPLE_DIST tolerance.
    arcpy.SimplifyPolygon_cartography(
        in_features=vector_3_bound_path,
        out_feature_class=vector_2_bound_path,
        algorithm="POINT_REMOVE",
        tolerance="{} Meters".format(C_SIMPLE_DIST),
        minimum_area="0 Unknown",
        error_option="RESOLVE_ERRORS",
        collapsed_point_option="NO_KEEP",
        in_barriers=""
    )
    Utility.addToolMessages()
    checkRecordCount(vector_2_bound_path)
    arcpy.AddMessage('ZFlag: ' + arcpy.env.outputZFlag)
    arcpy.AddMessage('MFlag: ' + arcpy.env.outputMFlag)
    # B1: dissolve everything into a single (multipart) boundary feature.
    arcpy.Dissolve_management(in_features=vector_2_bound_path,
                              out_feature_class=vector_1_bound_path,
                              dissolve_field="", statistics_fields="",
                              multi_part="MULTI_PART",
                              unsplit_lines="DISSOLVE_LINES")
    Utility.addToolMessages()
    checkRecordCount(vector_1_bound_path)
    deleteFields(vector_1_bound_path)
    record_count = checkRecordCount(vector_1_bound_path)
    # Geodesic-style area of the dissolved footprint; keeps the last row's
    # value (the dissolve should leave a single feature).
    footprint_area = 0
    for row in arcpy.da.SearchCursor(vector_1_bound_path, ["SHAPE@"]):  # @UndefinedVariable
        shape = row[0]
        footprint_area = shape.getArea("PRESERVE_SHAPE", "SQUAREMETERS")
    if footprint_area <= 0:
        arcpy.AddMessage("\tWARNGING: Area is 0 in {} '{}' bound '{}'".format(elev_type, f_path, vector_bound_path))
    # Core attribute fields, driven by the module-level FIELD_INFO table.
    addField(in_table=vector_1_bound_path, field_name=FIELD_INFO[PATH][0],
             field_alias=FIELD_INFO[PATH][1], field_type=FIELD_INFO[PATH][2],
             field_length=FIELD_INFO[PATH][3])
    addField(in_table=vector_1_bound_path, field_name=FIELD_INFO[NAME][0],
             field_alias=FIELD_INFO[NAME][1], field_type=FIELD_INFO[NAME][2],
             field_length=FIELD_INFO[NAME][3])
    addField(in_table=vector_1_bound_path, field_name=FIELD_INFO[AREA][0],
             field_alias=FIELD_INFO[AREA][1], field_type=FIELD_INFO[AREA][2],
             field_length=FIELD_INFO[AREA][3])
    addField(in_table=vector_1_bound_path, field_name=FIELD_INFO[ELEV_TYPE][0],
             field_alias=FIELD_INFO[ELEV_TYPE][1], field_type=FIELD_INFO[ELEV_TYPE][2],
             field_length=FIELD_INFO[ELEV_TYPE][3])
    addField(in_table=vector_1_bound_path, field_name=FIELD_INFO[RANGE][0],
             field_alias=FIELD_INFO[RANGE][1], field_type=FIELD_INFO[RANGE][2],
             field_length=FIELD_INFO[RANGE][3])
    deleteFields(vector_1_bound_path)
    arcpy.AddMessage(raster_props)
    # One field per raster property in KEY_LIST; short sleep between schema
    # edits (presumably to avoid locking issues — confirm).
    for field_name in KEY_LIST:
        time.sleep(0.25)
        field_shpname = FIELD_INFO[field_name][0]
        field_alias = FIELD_INFO[field_name][1]
        field_type = FIELD_INFO[field_name][2]
        field_length = FIELD_INFO[field_name][3]
        field_value = raster_props[field_name]
        if field_type == "TEXT":
            # Strip a trailing backslash so the quoted expression stays valid.
            if str(field_value).endswith('\\'):
                field_value = str(field_value)[0:-1]
            field_value = r'"{}"'.format(field_value)
        addField(in_table=vector_1_bound_path, field_name=field_shpname,
                 field_alias=field_alias, field_type=field_type,
                 field_length=field_length, expression=field_value)
    # Populate path/name/area/type attributes from the source raster path.
    b_f_path, b_f_name = os.path.split(f_path)
    b_f_name = os.path.splitext(b_f_name)[0]
    arcpy.CalculateField_management(in_table=vector_1_bound_path,
                                    field=FIELD_INFO[PATH][0],
                                    expression='"{}"'.format(b_f_path),
                                    expression_type="PYTHON_9.3")
    arcpy.CalculateField_management(in_table=vector_1_bound_path,
                                    field=FIELD_INFO[NAME][0],
                                    expression='"{}"'.format(b_f_name),
                                    expression_type="PYTHON_9.3")
    arcpy.CalculateField_management(in_table=vector_1_bound_path,
                                    field=FIELD_INFO[AREA][0],
                                    expression=footprint_area,
                                    expression_type="PYTHON_9.3")
    arcpy.CalculateField_management(in_table=vector_1_bound_path,
                                    field=FIELD_INFO[ELEV_TYPE][0],
                                    expression='"{}"'.format(elev_type),
                                    expression_type="PYTHON_9.3")
    # Z range = max - min; best-effort (fields may be absent).
    try:
        z_expr = "!{}! - !{}!".format(FIELD_INFO[MAX][0], FIELD_INFO[MIN][0])
        arcpy.CalculateField_management(in_table=vector_1_bound_path,
                                        field=FIELD_INFO[RANGE][0],
                                        expression=z_expr,
                                        expression_type="PYTHON_9.3")
    except:
        pass
    # Final output: B1 clipped to the project boundary.
    deleteFileIfExists(vector_bound_path, True)
    arcpy.Clip_analysis(in_features=vector_1_bound_path,
                        clip_features=bound_path,
                        out_feature_class=vector_bound_path,
                        cluster_tolerance="")
    Utility.addToolMessages()
    checkRecordCount(vector_bound_path)
    deleteFields(vector_bound_path)
    #debug = False
    #try:
    #    debug = (str(f_path).find("alamazoo") >= 0)
    #except:
    #    debug = False
    #if not debug:
    # Clean up all intermediates.
    deleteFileIfExists(vector_1_bound_path, useArcpy=True)
    deleteFileIfExists(vector_2_bound_path, useArcpy=True)
    deleteFileIfExists(vector_3_bound_path, useArcpy=True)
    deleteFileIfExists(vector_4_bound_path, useArcpy=True)
    deleteFileIfExists(vector_5_bound_path, useArcpy=True)
    #else:
    #    arcpy.AddMessage("\tleaving artifacts for {} '{}'".format(elev_type, vector_bound_path))
    doTime(a, "\tCreated BOUND {}".format(vector_bound_path))
del feature,features # --- Merge arcpy.Merge_management(inputs="Class1_Erase_Class3;Class2_Erase_Class3;Class3_Eliminate", output="KAST_Merge", field_mappings='ORIG_FID "ORIG_FID" true true false 10 Long 0 10 ,First,#,Class1_Erase_Class3,ORIG_FID,-1,-1;Class "Class" true true false 10 Long 0 10 ,First,#,Class1_Erase_Class3,ORIG_FID,-1,-1,Class2_Erase_Class3,ORIG_FID,-1,-1,Class3_Eliminate,ORIG_FID,-1,-1;Id "Id" true true false 10 Long 0 10 ,First,#,Class2_Erase_Class3,Id,-1,-1,Class3_Eliminate,Id,-1,-1') print ("Classes Is Merged To One Shapefile") # --- Dissolves Unnecessary Fields In Attribute Table arcpy.Dissolve_management(in_features="KAST_Merge", out_feature_class=OutputRAW, dissolve_field="Class", statistics_fields="", multi_part="MULTI_PART", unsplit_lines="DISSOLVE_LINES") print ("Dissolve Completed") # --- Eliminate Polygon Part (removes clusters smaller than 1000 kvm) arcpy.EliminatePolygonPart_management(in_features=OutputRAW, out_feature_class="KASTEPP", condition="AREA", part_area="1000 SquareMeters", part_area_percent="0", part_option="ANY") print ("Eliminate Polygons < 1000 kvm Completed") # --- Simplify to make it more efficient for being loaded onto a web server arcpy.SimplifyPolygon_cartography(in_features="KASTEPP", out_feature_class=Output, algorithm="POINT_REMOVE", tolerance="10 Meters", minimum_area="0 SquareMeters", error_option="RESOLVE_ERRORS", collapsed_point_option="NO_KEEP", in_barriers="") print ("Simplified") print ("AutoKAST COMPLETED") except: ErrorHandling(LogFile) sys.exit(1)
# simplifyOops1.py
# Purpose: Create simplified polygons for all of the valid polygon files
#          and print a warning if it fails for one of the files.
# Usage:   No arguments needed.
# Warning: This script contains an error! (kept intact — it is a teaching
#          exercise; behavior is unchanged by this restyle)
import arcpy, os

arcpy.env.workspace = 'C:/Users/owner/Downloads/Sample_scripts/ch07/'
arcpy.env.overwriteOutput = True

# Output directory for the simplified copies; create it on first run.
outDir = 'C:/Users/owner/Downloads/Sample_scripts/ch07/chichui/'
if not os.path.exists(outDir):
    os.mkdir(outDir)

# Attempt to simplify every feature class; report tool failures and move on.
for fc in arcpy.ListFeatureClasses():
    try:
        count = arcpy.SimplifyPolygon_cartography(fc, outDir + fc, '#', 100)
        print('{0} has been simplified.'.format(fc))
    except arcpy.ExecuteError:
        print(arcpy.GetMessages())
def main(config_file, *args):
    """Refresh a hosted community-parcels feature service from local data.

    Reads an INI config, appends the local community-parcel copy into the
    local parcels table, simplifies the geometry, deletes the service's
    existing features in chunks, then uploads the simplified features in
    OID slices. Python 2 script (print statements, ConfigParser,
    cursor.next()).

    NOTE(review): reconstructed from collapsed lines; indentation of the
    createCurrent block and the deletion loop is inferred from the sibling
    variant of this script in the same file — confirm against the original.
    """
    # Set overwrite output option to True
    arcpy.env.overwriteOutput = True
    if isfile(config_file):
        config = ConfigParser.ConfigParser()
        config.read(config_file)
    else:
        print "INI file not found."
        sys.exit()
    # Config File
    username = config.get('AGOL', 'user')
    password = config.get('AGOL', 'pass')
    localparcels = config.get('LOCAL_DATA', 'localparcels')
    CommunityParcelsLocalCopy = config.get('LOCAL_DATA', 'communityparcelslocalcopy')
    createCurrent = config.get('LOCAL_DATA', 'createcurrent')
    reportCurrentURL = config.get('FS_INFO', 'featureserviceurl')
    deleteSQL = config.get('FS_INFO', 'deletesql')
    countycityname = config.get('LOCAL_DATA', 'localfips')
    print "Loading Configuration File"
    arcpy.AddMessage("Loading Configuration File")
    # --- Validate required settings; exit early when anything is missing.
    if CommunityParcelsLocalCopy == "":
        print "Please specify a input community parcel layer (CommunityParcelsLocalCopy=) in the configuration file, exiting"
        arcpy.AddMessage(
            "Please specify a input parcel layer in the configuration file, exiting"
        )
        sys.exit()
    if username == "":
        print "Please specify a ArcGIS Online Username (username =)in the configuration file, exiting"
        arcpy.AddMessage(username)
        sys.exit()
    if password == "":
        print "Please specify a ArcGIS Online password (password =)in the configuration file, exiting"
        arcpy.AddMessage(password)
        sys.exit()
    if deleteSQL == "":
        print "Please specify a SQL query (DELETESQL= LOCALFIPS ='jurisdiction') in the configuration file, exiting"
        arcpy.AddMessage(
            "Please specify a SQL query (DELETESQL= LOCALFIPS ='jurisdiction') in the configuration file, exiting"
        )
        sys.exit()
    # --- Connect to the hosted feature service.
    fs = layer.FeatureLayer(url=reportCurrentURL, username=username,
                            password=password)
    if fs == None:
        print "Cannot find or connect to service, make sure service is accessible"
        arcpy.AddMessage(
            "Cannot find or connect to service, make sure service is accessible"
        )
        sys.exit()
    # Update Current service if used - see the ArcREST folder in the application directory
    arcpy.management.TruncateTable(localparcels)
    print "Cleaning up local parcel data"
    arcpy.AddMessage("Cleaning up local parcels")
    if createCurrent == "True":
        fs.url = reportCurrentURL
    # Append with schema test ("TEST"): source must match target schema.
    arcpy.Append_management(CommunityParcelsLocalCopy, localparcels, "TEST")
    print "Mapping Local Parcel data to Community Parcel Schema"
    arcpy.AddMessage("Mapping Local Parcel data to Community Parcel Schema")
    # Densify then simplify so geometry survives the JSON upload round-trip.
    arcpy.Densify_edit(localparcels)
    simplify = "{}temp".format(localparcels)
    arcpy.SimplifyPolygon_cartography(localparcels, simplify,
                                      "POINT_REMOVE", "1 Meters")
    print "Simplifying (densifying) Parcel Geometry"
    arcpy.AddMessage("Simplifying (densifying) Parcel Geometry")
    print "Truncating Parcels from Feature Service"
    arcpy.AddMessage("Truncating Parcels from Feature Service")
    print "Community Parcel update started, please be patient"
    arcpy.AddMessage("Community Parcel update started, please be patient")
    # --- Delete the service's existing features in chunks of 500 OIDs;
    #     any failure is swallowed (best-effort delete).
    try:
        value1 = fs.query(where=deleteSQL, returnIDsOnly=True)
        myids = value1['objectIds']
        minId = min(myids)
        i = 0
        maxId = max(myids)
        print minId
        print maxId
        chunkSize = 500
        while (i < len(myids)):
            # print myids[i:i+1000]
            oids = ",".join(str(e) for e in myids[i:i + chunkSize])
            print oids
            if oids == '':
                continue
            else:
                fs.deleteFeatures(objectIds=oids)
                i += chunkSize
                if i > len(myids):
                    i = len(myids)
                print "Deleted: {0}%".format(int(((i / float(len(myids)) * 100))))
                arcpy.AddMessage("Deleted: {0}%".format(
                    int(((i / float(len(myids)) * 100)))))
    except:
        pass
    print "Community Parcels upload Started"
    arcpy.AddMessage(
        "Community Parcels upload started, please be patient. For future consideration, please run tool during non-peak internet usage"
    )
    arcpy.env.overwriteOutput = True
    # --- Upload in OID-range slices: find min/max OID, then build
    #     range expressions stepping by 100.
    inDesc = arcpy.Describe(simplify)
    oidName = arcpy.AddFieldDelimiters(simplify, inDesc.oidFieldName)
    sql = '%s = (select min(%s) from %s)' % (oidName, oidName,
                                             os.path.basename(simplify))
    cur = arcpy.da.SearchCursor(simplify, [inDesc.oidFieldName], sql)
    minOID = cur.next()[0]
    del cur, sql
    sql = '%s = (select max(%s) from %s)' % (oidName, oidName,
                                             os.path.basename(simplify))
    cur = arcpy.da.SearchCursor(simplify, [inDesc.oidFieldName], sql)
    maxOID = cur.next()[0]
    del cur, sql
    breaks = range(minOID, maxOID)[0:-1:100]  #2K slices
    breaks.append(maxOID + 1)
    exprList = [oidName + ' >= ' + str(breaks[b]) + ' and ' +
                oidName + ' < ' + str(breaks[b + 1])
                for b in range(len(breaks) - 1)]
    for expr in exprList:
        UploadLayer = arcpy.MakeFeatureLayer_management(
            simplify, 'TEMPCOPY', expr).getOutput(0)
        fs.addFeatures(UploadLayer)
    # Remove the temporary simplified feature class.
    arcpy.Delete_management(simplify)
def main(config_file, *args):
    """Field-map local parcels into the community-parcel schema and push
    them to a hosted feature service.

    Reads an INI config (credentials, layer paths, and a FIELD_MAPPER
    section naming the local field behind each community-schema field),
    appends local parcels with that field map, calculates bookkeeping
    fields, densifies/simplifies the geometry, deletes the service's
    existing features in chunks, then uploads in OID slices. Python 2
    script (print statements, ConfigParser, cursor.next()).

    NOTE(review): reconstructed from collapsed lines; statement order is
    preserved token-for-token, indentation inferred.
    """
    # Set overwrite output option to True
    arcpy.env.overwriteOutput = True
    if isfile(config_file):
        config = ConfigParser.ConfigParser()
        config.read(config_file)
    else:
        print "INI file not found."
        sys.exit()
    # Config File
    username = config.get('AGOL', 'user')
    password = config.get('AGOL', 'pass')
    LocalParcels = config.get('LOCAL_DATA', 'localparcels')
    CommunityParcelsLocalCopy = config.get('LOCAL_DATA', 'communityparcelslocalcopy')
    createCurrent = config.get('LOCAL_DATA', 'createcurrent')
    reportCurrentURL = config.get('FS_INFO', 'featureserviceurl')
    deleteSQL = config.get('FS_INFO', 'deletesql')
    countycityname = config.get('LOCAL_DATA', 'localfips')
    # FIELD_MAPPER: local field name behind each community-schema field
    # (empty string means "no local equivalent").
    LowParcelID = config.get('FIELD_MAPPER', 'lowparcelid')
    ParcelID = config.get('FIELD_MAPPER', 'parcelid')
    FloorDesignator = config.get('FIELD_MAPPER', 'floordesignator')
    StatedArea = config.get('FIELD_MAPPER', 'statedarea')
    ConveyanceName = config.get('FIELD_MAPPER', 'conveyancename')
    UseCode = config.get('FIELD_MAPPER', 'usecode')
    UseDescription = config.get('FIELD_MAPPER', 'usedescription')
    TaxUseDescription = config.get('FIELD_MAPPER', 'taxusedescription')
    Improved = config.get('FIELD_MAPPER', 'improved')
    Owntype = config.get('FIELD_MAPPER', 'owntype')
    SiteAddress = config.get('FIELD_MAPPER', 'siteaddress')
    Ownername1 = config.get('FIELD_MAPPER', 'ownername1')
    Ownername2 = config.get('FIELD_MAPPER', 'ownername2')
    PostalAddress = config.get('FIELD_MAPPER', 'postaladdress')
    USPSBox = config.get('FIELD_MAPPER', 'uspsbox')
    State = config.get('FIELD_MAPPER', 'state')
    City = config.get('FIELD_MAPPER', 'city')
    Zip = config.get('FIELD_MAPPER', 'zip')
    InternationalAddress = config.get('FIELD_MAPPER', 'internationaladdress')
    TaxableValue = config.get('FIELD_MAPPER', 'taxablevalue')
    SalePrice = config.get('FIELD_MAPPER', 'saleprice')
    SaleDate = config.get('FIELD_MAPPER', 'saledate')
    LocalFIPS = config.get('FIELD_MAPPER', 'localfips')
    StateFIPS = config.get('FIELD_MAPPER', 'statefips')
    GNISID = config.get('FIELD_MAPPER', 'gnisid')
    LastEditor = config.get('FIELD_MAPPER', 'lasteditor')
    LastUpdate = config.get('FIELD_MAPPER', 'lastupdate')
    ##SHAPE_Length = config.get ('FIELD_MAPPER', 'SHAPE_Length')
    ##SHAPE_Area = config.get ('FIELD_MAPPER', 'SHAPE_Area')
    ImprovedValue = config.get('FIELD_MAPPER', 'improvedvalue')
    LandValue = config.get('FIELD_MAPPER', 'landvalue')
    AssessedValue = config.get('FIELD_MAPPER', 'assessedvalue')
    print "Loading Configuration File"
    arcpy.AddMessage("Loading Configuration File")
    # --- Validate required settings; exit early when anything is missing.
    if arcpy.Exists(LocalParcels) == False:
        print "Please specify a input parcel feature class (LocalParcels=) in the configuration file, exiting"
        arcpy.AddMessage(
            "Please specify a input parcel layer in the configuration file, exiting"
        )
        sys.exit()
    if CommunityParcelsLocalCopy == "":
        print "Please specify a input community parcel layer (CommunityParcelsLocalCopy=) in the configuration file, exiting"
        arcpy.AddMessage(
            "Please specify a input parcel layer in the configuration file, exiting"
        )
        sys.exit()
    if username == "":
        print "Please specify a ArcGIS Online Username (username =)in the configuration file, exiting"
        arcpy.AddMessage(username)
        sys.exit()
    if password == "":
        print "Please specify a ArcGIS Online password (password =)in the configuration file, exiting"
        arcpy.AddMessage(password)
        sys.exit()
    if deleteSQL == "":
        print "Please specify a SQL query (DELETESQL= LOCALFIPS ='jurisdiction') in the configuration file, exiting"
        arcpy.AddMessage(
            "Please specify a SQL query (DELETESQL= LOCALFIPS ='jurisdiction') in the configuration file, exiting"
        )
        sys.exit()
    # --- Connect to the hosted feature service.
    fs = layer.FeatureLayer(url=reportCurrentURL, username=username,
                            password=password)
    if fs == None:
        print "Cannot find or connect to service, make sure service is accessible"
        arcpy.AddMessage(
            "Cannot find or connect to service, make sure service is accessible"
        )
        sys.exit()
    # Update Current service if used - see the services helper in the agolhelper folder
    if createCurrent == "True":
        fs.url = reportCurrentURL
    # Delete existing dataset that matches the community parcel schema
    arcpy.management.TruncateTable(CommunityParcelsLocalCopy)
    print "Cleaning up local parcel data"
    # Append new parcels into the community parcels schema, field map your data into the community schema. Add local data field names after the "#" in the list.
    # For example, for STATEAREA "STATEAREA" true true false 50 Text 0 0 ,First,#,LocalParcels,TotalAcres,-1,-1; The local Parcels field name from STATEDAREA (community parcels schema) is TotalAcres.
    # Shared tail of every field-map entry (50-char Text, First rule, no source).
    common_vars = "true true false 50 Text 0 0, First, #"
    # --- Build the Append field-mapping string, one entry per schema field:
    #     unmapped fields get the bare "#" form; mapped fields reference
    #     LocalParcels.<local field>.
    if LowParcelID == "":
        new_field = """LOWPARCELID 'Low Parcel Identification Number' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """LOWPARCELID 'Low Parcel Identification Number' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, LowParcelID)
    field_map = "{}".format(new_field)
    if ParcelID == "":
        new_field = """PARCELID 'Parcel Identification Number' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """PARCELID 'Parcel Identification Number' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, ParcelID)
    field_map = "{}; {}".format(field_map, new_field)
    if FloorDesignator == "":
        new_field = """FLOORDESIG 'Floor Designator' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """FLOORDESIG 'Floor Designator' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, FloorDesignator)
    field_map = "{}; {}".format(field_map, new_field)
    if StatedArea == "":
        new_field = """STATEAREA 'Stated Area' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """STATEAREA 'Stated Area' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, StatedArea)
    field_map = "{}; {}".format(field_map, new_field)
    if ConveyanceName == "":
        new_field = """CNVYNAME 'Sub or Condo Name' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """CNVYNAME 'Sub or Condo Name' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, ConveyanceName)
    field_map = "{}; {}".format(field_map, new_field)
    if UseCode == "":
        new_field = """USEDCD 'Parcel Use Code' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """USEDCD 'Parcel Use Code' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, UseCode)
    field_map = "{}; {}".format(field_map, new_field)
    if UseDescription == "":
        new_field = """USEDSCRP 'Parcel Use Description' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """USEDSCRP 'Parcel Use Description' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, UseDescription)
    field_map = "{}; {}".format(field_map, new_field)
    if TaxUseDescription == "":
        new_field = """CVTTXDSCRP 'Tax District Description' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """CVTTXDSCRP 'Tax District Description' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, TaxUseDescription)
    field_map = "{}; {}".format(field_map, new_field)
    if Improved == "":
        new_field = """IMPROVED 'Improved Structure' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """IMPROVED 'Improved Structure' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, Improved)
    field_map = "{}; {}".format(field_map, new_field)
    if Owntype == "":
        new_field = """OWNTYPE 'Owner Type' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """OWNTYPE 'Owner Type' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, Owntype)
    field_map = "{}; {}".format(field_map, new_field)
    if SiteAddress == "":
        new_field = """SITEADRESS 'Physical Address' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """SITEADRESS 'Physical Address' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, SiteAddress)
    field_map = "{}; {}".format(field_map, new_field)
    if Ownername1 == "":
        new_field = """OWNERNME1 'First Owner Name' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """OWNERNME1 'First Owner Name' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, Ownername1)
    field_map = "{}; {}".format(field_map, new_field)
    if Ownername2 == "":
        new_field = """OWNERNME2 'Second Owner Name' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """OWNERNME2 'Second Owner Name' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, Ownername2)
    field_map = "{}; {}".format(field_map, new_field)
    if PostalAddress == "":
        new_field = """PSTLADRESS 'Mailing Address' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """PSTLADRESS 'Mailing Address' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, PostalAddress)
    field_map = "{}; {}".format(field_map, new_field)
    if USPSBox == "":
        new_field = """USPSBOX 'US Postal Box Number' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """USPSBOX 'US Postal Box Number' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, USPSBox)
    field_map = "{}; {}".format(field_map, new_field)
    if City == "":
        new_field = """PSTLCITY 'City' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """PSTLCITY 'City' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, City)
    field_map = "{}; {}".format(field_map, new_field)
    if State == "":
        new_field = """PSTLSTATE'State' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """PSTLSTATE 'State' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, State)
    field_map = "{}; {}".format(field_map, new_field)
    if Zip == "":
        new_field = """PSTLZIP 'Zip Code' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """PSTLZIP 'Zip Code' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, Zip)
    field_map = "{}; {}".format(field_map, new_field)
    if InternationalAddress == "":
        new_field = """PSTLINTER 'International Postal Address' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """PSTLINTER 'International Postal Address' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, InternationalAddress)
    field_map = "{}; {}".format(field_map, new_field)
    if TaxableValue == "":
        new_field = """CNTTXBLVAL 'Current Taxable Value' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """CNTTXBLVAL 'Current Taxable Value' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, TaxableValue)
    field_map = "{}; {}".format(field_map, new_field)
    if SalePrice == "":
        new_field = """SALEPRICE 'Last Sale Price' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """SALEPRICE 'Last Sale Price' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, SalePrice)
    field_map = "{}; {}".format(field_map, new_field)
    if SaleDate == "":
        new_field = """SALEDATE 'Last Sale Date' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """SALEDATE 'Last Sale Date' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, SaleDate)
    field_map = "{}; {}".format(field_map, new_field)
    if LocalFIPS == "":
        new_field = """LOCALFIPS 'Local FIPS Code' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """LOCALFIPS 'Local FIPS Code' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, LocalFIPS)
    field_map = "{}; {}".format(field_map, new_field)
    if StateFIPS == "":
        new_field = """STCOFIPS 'State FIPS Code' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """STCOFIPS 'State FIPS Code' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, StateFIPS)
    field_map = "{}; {}".format(field_map, new_field)
    if ImprovedValue == "":
        new_field = """IMPVALUE 'Improved Structure Value' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """IMPVALUE 'Improved Structure Value' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, ImprovedValue)
    field_map = "{}; {}".format(field_map, new_field)
    if LandValue == "":
        new_field = """LNDVALUE 'Land Value' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """LNDVALUE 'Land Value' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, LandValue)
    field_map = "{}; {}".format(field_map, new_field)
    if AssessedValue == "":
        new_field = """CNTASSDVAL 'Current Assessed Value' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """CNTASSDVAL 'Current Assessed Value' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, AssessedValue)
    field_map = "{}; {}".format(field_map, new_field)
    if GNISID == "":
        new_field = """GNISID 'Geographic Names Information System Code' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """GNISID 'Geographic Names Information System Code' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, GNISID)
    field_map = "{}; {}".format(field_map, new_field)
    if LastEditor == "":
        new_field = """LASTEDITOR 'Last Editor' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """LASTEDITOR 'Last Editor' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, LastEditor)
    field_map = "{}; {}".format(field_map, new_field)
    if LastUpdate == "":
        new_field = """LASTUPDATE 'Last Update' true true false 50 Text 0 0, First, #"""
    else:
        new_field = """LASTUPDATE 'Last Update' {}, {}, {}, -1, -1""".format(
            common_vars, LocalParcels, LastUpdate)
    field_map = "{}; {}".format(field_map, new_field)
    ##    if SHAPE_Length =="":
    ##        new_field = """SHAPE_Length 'SHAPE_Length' true true false 50 Text 0 0, First, #"""
    ##    else:
    ##        new_field = """SHAPE_Length 'SHAPE_Length' {}, {}, {}, -1, -1""".format(common_vars, LocalParcels, SHAPE_Length)
    ##    field_map = "{}; {}".format(field_map, new_field)
    ##
    ##    if SHAPE_Area =="":
    ##        new_field = """SHAPE_Area 'SHAPE_Area' true true false 50 Text 0 0, First, #"""
    ##    else:
    ##        new_field = """SHAPE_Area 'SHAPE_Area' {}, {}, {}, -1, -1""".format(common_vars, LocalParcels, SHAPE_Area)
    ##    field_map = "{}; {}".format(field_map, new_field)
    # Append with NO_TEST so the field map (not schema identity) governs.
    arcpy.Append_management(LocalParcels, CommunityParcelsLocalCopy, "NO_TEST",
                            field_map)
    print "Mapping Local Parcel data to Community Parcel Schema"
    print "Community Parcel update started, please be patient"
    arcpy.AddMessage("Mapping Local Parcel data to Community Parcel Schema")
    arcpy.AddMessage("Community Parcel update Started, please be patient")
    # Calculate Last Editor Field
    calc0 = '"{0}"'.format(username)
    arcpy.CalculateField_management(CommunityParcelsLocalCopy, "LASTEDITOR", calc0)
    print "Calculating Last Editor"
    arcpy.AddMessage("Calculating Last Editor")
    # Calculate the Last Update field
    arcpy.CalculateField_management(CommunityParcelsLocalCopy, "LASTUPDATE",
                                    "time.strftime(\"%m/%d/%Y\")", "PYTHON", "")
    print "Calculating Last Update "
    arcpy.AddMessage("Calculating Last Update")
    # Calculate the LOCALFIPS to the County/City Name
    calc = '"{0}"'.format(countycityname)
    arcpy.CalculateField_management(CommunityParcelsLocalCopy, "LOCALFIPS",
                                    calc, "VB", "")
    print "Set FIPS Code information"
    arcpy.AddMessage("Calculating 'FIPS' Code Information")
    #Calculate improved information
    arcpy.CalculateField_management(
        CommunityParcelsLocalCopy, "IMPROVED", "improve", "VB",
        "Dim improve\\nIf [IMPVALUE] > 1 Then\\nimprove = \"YES\"\\n\\nelse\\nimprove = \"NO\"\\n\\nend if\\n"
    )
    print "Calculating Improved Structure information"
    arcpy.AddMessage("Calculating Improved Structure information")
    #Densify FC to support JSON conversion
    arcpy.AddMessage("Simplifying (densifying) Parcel Geometry")
    arcpy.Densify_edit(CommunityParcelsLocalCopy)
    simplify = "{}temp".format(CommunityParcelsLocalCopy)
    arcpy.SimplifyPolygon_cartography(CommunityParcelsLocalCopy, simplify,
                                      "POINT_REMOVE", "1 Meters")
    print "Truncating Parcels from Feature Service"
    arcpy.AddMessage("Truncating Parcels from Feature Service")
    # --- Delete the service's existing features in chunks of 500 OIDs;
    #     any failure is swallowed (best-effort delete).
    try:
        value1 = fs.query(where=deleteSQL, returnIDsOnly=True)
        myids = value1['objectIds']
        minId = min(myids)
        i = 0
        maxId = max(myids)
        print minId
        print maxId
        chunkSize = 500
        while (i < len(myids)):
            # print myids[i:i+1000]
            oids = ",".join(str(e) for e in myids[i:i + chunkSize])
            print oids
            if oids == '':
                continue
            else:
                fs.deleteFeatures(objectIds=oids)
                i += chunkSize
                if i > len(myids):
                    i = len(myids)
                print "Deleted: {0}%".format(int(((i / float(len(myids)) * 100))))
                arcpy.AddMessage("Deleted: {0}%".format(
                    int(((i / float(len(myids)) * 100)))))
    except:
        pass
    print "Community Parcels upload Started"
    arcpy.AddMessage(
        "Community Parcels upload started, please be patient. For future consideration, please run tool during non-peak internet usage"
    )
    arcpy.env.overwriteOutput = True
    # --- Upload in OID-range slices: find min/max OID, then build range
    #     expressions stepping by 100.
    inDesc = arcpy.Describe(simplify)
    oidName = arcpy.AddFieldDelimiters(simplify, inDesc.oidFieldName)
    sql = '%s = (select min(%s) from %s)' % (oidName, oidName,
                                             os.path.basename(simplify))
    cur = arcpy.da.SearchCursor(simplify, [inDesc.oidFieldName], sql)
    minOID = cur.next()[0]
    del cur, sql
    sql = '%s = (select max(%s) from %s)' % (oidName, oidName,
                                             os.path.basename(simplify))
    cur = arcpy.da.SearchCursor(simplify, [inDesc.oidFieldName], sql)
    maxOID = cur.next()[0]
    del cur, sql
    breaks = range(minOID, maxOID)[0:-1:100]  #2K slices
    breaks.append(maxOID + 1)
    exprList = [oidName + ' >= ' + str(breaks[b]) + ' and ' +
                oidName + ' < ' + str(breaks[b + 1])
                for b in range(len(breaks) - 1)]
    for expr in exprList:
        UploadLayer = arcpy.MakeFeatureLayer_management(
            simplify, 'TEMPCOPY', expr).getOutput(0)
        fs.addFeatures(UploadLayer)
    # Remove the temporary simplified feature class.
    arcpy.Delete_management(simplify)
def processAuthoritativeData(self):
    """Build hurricane events from authoritative feed data.

    Groups observed positions, tracks and forecast positions by storm
    name, derives each storm's most recent observed position, first
    observed position and first future forecast, buffers the observed
    track into a storm-path polygon, then POSTs one event per storm to
    the events service and pushes the boundary to portal.

    Reads self.posJson / self.trackJson / self.forecastPositionJson.
    Side effects: HTTP POSTs and 'in_memory' feature classes.

    Fixes relative to the previous revision:
    - max()/min() over enumerate(seq) compared indices, not DTG values;
      now keyed on the DTG string so "most recent" is actually by time;
    - firstForecastDT / foreAttr could be referenced while unbound
      (NameError, silently swallowed by the bare except); both are now
      initialized and guarded;
    - forecastReports is read with .get() so storms without forecasts do
      not raise KeyError;
    - dict.has_key() replaced with the `in` operator (works on Py2 and Py3).
    """
    self.logger.info('Process hurricane data')
    positions = self.posJson['features']
    tracks = self.trackJson['features']
    #forecastErrorCones = forecastErrorConeJson['features']
    forecastPositions = self.forecastPositionJson['features']

    # Group all of the storm reports together under a common storm name.
    # NOTE(review): itertools.groupby only groups *adjacent* records, so
    # this assumes the feed is already ordered by STORMNAME -- confirm.
    self.logger.info('grouping storm reports under common name')
    stormReports = {}
    for k, v in groupby(positions,
                        key=lambda x: x['attributes']['STORMNAME'][:]):
        stormReports[k] = list(v)

    # Group all of the forecast reports under a common storm name
    self.logger.info('grouping forecast reports under common name')
    forecastReports = {}
    for k, v in groupby(forecastPositions,
                        key=lambda x: x['attributes']['STORMNAME'][:]):
        forecastReports[k] = list(v)

    # Get the most recent observed position for each storm
    self.logger.info('get most recent observed positions')
    mostRecentPositions = {}
    firstPositions = {}
    firstForecasts = {}
    for stormName in stormReports:
        seq = [str(x['attributes']['DTG']) for x in stormReports[stormName]]
        # fix: key on the DTG string; plain max(enumerate(...)) compared
        # the index first and always picked the last/first list element
        index, value = max(enumerate(seq), key=lambda pair: pair[1])
        mostRecentPositions[stormName] = stormReports[stormName][index]
        index, value = min(enumerate(seq), key=lambda pair: pair[1])
        firstPositions[stormName] = stormReports[stormName][index]

        # First forecast report with a valid time later than the most
        # recent observed position
        stormForecasts = forecastReports.get(stormName)
        if stormForecasts is not None and len(stormForecasts) > 0:
            mostRecentDT = self.getMostRecentDateTime(
                mostRecentPositions[stormName]['attributes'])
            seq = sorted([self.getForecastValidDateTime(x['attributes'])
                          for x in stormForecasts])
            self.logger.info(seq)
            firstForecastDT = None  # fix: was unbound when nothing was newer
            for forecastDT in seq:
                self.logger.info(forecastDT + " > " + mostRecentDT)
                if forecastDT > mostRecentDT:
                    firstForecastDT = forecastDT
                    break
            if firstForecastDT is not None:
                firstForecast = next(
                    (x for x in stormForecasts
                     if self.getForecastValidDateTime(
                         x['attributes']) == firstForecastDT), None)
                if firstForecast is not None:
                    firstForecasts[stormName] = firstForecast

    # Flatten the set of observed tracks per storm report into a single list
    self.logger.info(
        'flatten observed tracks per storm report into single list')
    startDates = {}
    stormTracks = {}
    for k, v in groupby(tracks,
                        key=lambda x: x['attributes']['STORMNAME'][:]):
        stormName = k.encode('utf-8')
        trackData = list(v)
        startDates[stormName] = trackData[0]['attributes']['STARTDTG']
        if startDates[stormName] is None:
            # fall back to the first observed position's timestamp
            startDates[stormName] = firstPositions[stormName][
                'attributes']['DTG']
        stormTracks[stormName] = [
            item
            for sublist in [d['geometry']['paths'] for d in trackData]
            for item in sublist
        ]

    # Buffer the observed path into a polygon.
    # NOTE(review): the log text says "100 mile buffer" but the code
    # multiplies metersPerMile by 250 (a 250 mile buffer) -- confirm which
    # is intended; behavior is kept as-is here.
    self.logger.info('form 100 mile buffer around observed path')
    stormPathPolygons = {}
    stormPathPolylines = {}
    for stormName in stormTracks:
        pathPolylines = []
        pathBuffers = []
        for path in stormTracks[stormName]:
            pathPolyline = arcpy.Polyline(
                arcpy.Array([arcpy.Point(*coords) for coords in path]),
                arcpy.SpatialReference(3857))
            bufferedPath = pathPolyline.buffer(self.metersPerMile * 250)
            pathPolylines.append(pathPolyline)
            pathBuffers.append(bufferedPath)
        stormPathPolylines[stormName] = pathPolylines
        stormPathPolygons[stormName] = pathBuffers

    # Dissolve buffer polygons into a single simplified polygon per storm
    self.logger.info(
        'dissolve 100 mile buffer polygons into single polygon per storm')
    stormBufferFeatures = {}
    arcpy.Delete_management('in_memory')
    fileCount = 0
    for stormName in stormPathPolygons:
        fileCount += 1
        stormPathBufferFC = 'in_memory\\stormPathBuffer_featureClass_' + str(
            fileCount)
        simplifiedPathBufferFC = 'in_memory\\simplifiedPathBuffer_featureClass_' + str(
            fileCount)
        arcpy.Dissolve_management(stormPathPolygons[stormName],
                                  stormPathBufferFC,
                                  multi_part=False)
        arcpy.SimplifyPolygon_cartography(stormPathBufferFC,
                                          simplifiedPathBufferFC,
                                          tolerance=100)
        dissolvedFs = arcpy.FeatureSet()
        dissolvedFs.load(simplifiedPathBufferFC)
        #arcpy.FeaturesToJSON_conversion(stormPathBufferFC, 'output.json', 'GEOJSON')
        stormBufferFeatures[stormName] = dissolvedFs

    # Iterate through the storms, posting events to the backend and portal
    self.logger.info('post storms to backend and portal')
    for stormName in mostRecentPositions:
        position = mostRecentPositions[stormName]
        posAttr = position['attributes']
        posAttr['geometry'] = position['geometry']
        foreAttr = None  # fix: was unbound for storms without a forecast
        if stormName in firstForecasts and firstForecasts[
                stormName] is not None:
            foreAttr = firstForecasts[stormName]['attributes']
        if posAttr['STORMNAME'] in startDates and startDates[
                posAttr['STORMNAME']] is not None:
            posAttr['STARTDTG'] = startDates[stormName]

        # initialize event object to be POSTed to the event service
        try:
            self.logger.info('create event from hurricane data: ' +
                             posAttr['STORMNAME'])
            event = LandingPageEvent.LandingPageEvent()
            event.consumeHurricaneReport(posAttr, foreAttr)
            self.logger.info(
                'success creating event from hurricane data: ' +
                posAttr['STORMNAME'])
        except:
            self.logger.error(
                'failed to create event from hurricane data: ' +
                posAttr['STORMNAME'] + ' exception: ' +
                str(sys.exc_info()[0]))
            continue

        self.logger.info('POST hurricane event to backend: ' + event.title)
        response = requests.post(self.eventsServiceUrl,
                                 data=event.toJSON(),
                                 headers=self.requestHeaders)
        if response.ok:
            self.logger.info('POST successful for event: ' + event.title)
            bufferGeometryPolygon = stormBufferFeatures[stormName]
            bufferGeometryJSON = json.loads(bufferGeometryPolygon.JSON)
            bufferGeometry = bufferGeometryJSON['features'][0]
            try:
                self.logger.info('send hurricane data to portal: ' +
                                 event.uri)
                self.portal.upsertHurricaneEventFeatures(
                    response.content, posAttr, foreAttr, bufferGeometry)
                self.logger.info(
                    'Hurricane data successfully sent to portal: ' +
                    event.uri)
            except:
                self.logger.error(
                    'failed to send hurricane data to portal: ' +
                    event.uri + ' exception: ' + str(sys.exc_info()[0]))
        else:
            self.logger.warn('POST failed for event: ' + event.title)
# NOTE(review): despite the "_str" suffix this holds a negated number, not a
# string -- confirm intended use downstream.
BuffIn_Dist_str = BuffIn_Dist * -1
Gen_Tol = 1 #m -- generalization tolerance in metres
Gen_Tol_str = "1"

# Action 01:------------------------------------------------------------------->
# Simplify lakes to reduce the number of vertices (tolerance = SimpTol)
print("Process: SimplifyPolygon_cartography")
out_path = in_path
in_file_1 = in_file_name + ".shp"
out_file_name_1 = in_file_name + "_Simplify" + str(SimpTol)
out_file_1 = out_file_name_1 + ".shp"
# NOTE(review): plain string concatenation -- assumes in_path/out_path end
# with a path separator.
shapefile_in = in_path + in_file_1
shapefile_out = out_path + out_file_1
# POINT_REMOVE with a 1 m^2 minimum area; RESOLVE_ERRORS repairs topology
# errors introduced by simplification; collapsed points are discarded.
arcpy.SimplifyPolygon_cartography(shapefile_in, shapefile_out, "POINT_REMOVE",
                                  SimpTol, "1 SquareMeters", "RESOLVE_ERRORS",
                                  "NO_KEEP")

# Action 02:------------------------------------------------------------------->
# Remove interior lakes (must wait to remove interior lakes until after
# simplify because little lakes could be lost --> argument for why the
# simplification tolerance should be ~100; 1000 could lose too many little
# lakes)
print("Remove Interior Lakes")
out_path = in_path
in_file_name_2 = out_file_name_1
in_file_2 = in_file_name_2 + ".shp"
out_file_name_2 = in_file_name_2 + "_RemoveIntLakes"
out_file_2 = out_file_name_2 + ".shp"
shapefile_in = in_path + in_file_2
shapefile_out = out_path + out_file_2

# Create two separate layers from the input shapefile, both have all lakes
def Overview(Input_Geologic_Features,
             Output_Finished,
             Aggregation_Distance,
             Minimum_Area="0 Unknown",
             Minimum_Hole_Size="0 Unknown",
             Preserve_orthogonal_shape=False,
             Barrier_Features=None,
             Simplification_Algorithm="POINT_REMOVE",
             Simplification_Tolerance=None,
             Minimum_Area_2_="0 Unknown",
             Handling_Topological_Errors="RESOLVE_ERRORS",
             Keep_collapsed_points=True,
             Input_Barrier_Layers=None,
             Smoothing_Algorithm="PAEK",
             Smoothing_Tolerance=None,
             Preserve_endpoint_for_rings=True,
             Handling_Topological_Errors_2_="NO_CHECK",
             Input_Barrier_Layers_2_=None,
             Distance_value_or_field_=None,
             Side_Type="FULL",
             End_Type="ROUND",
             Dissolve_Type="NONE",
             Dissolve_Field_s_=None,
             Method="PLANAR",
             Condition="AREA",
             Area="0 Unknown",
             Percentage=0,
             Eliminate_contained_parts_only=True):
    """Generalize geologic polygon features (ModelBuilder export).

    Chain: Union -> Multipart To Singlepart -> Aggregate Polygons ->
    Simplify Polygon -> Smooth Polygon -> Buffer -> Eliminate Polygon
    Part, writing the final result to Output_Finished.

    Fixes relative to the raw export:
    - parameters that followed defaulted parameters now default to None so
      the `def` is valid Python (argument order is unchanged, so existing
      positional and keyword callers still work);
    - Output_Table, passed to Aggregate Polygons, is now defined instead
      of raising NameError.

    NOTE(review): the intermediate out_feature_class values are empty
    strings exactly as ModelBuilder generated them -- they must be filled
    in with real workspace paths before this tool can actually run.
    """
    # To allow overwriting outputs change overwriteOutput option to True.
    arcpy.env.overwriteOutput = False

    # Process: Union (Union)
    Output_Union = ""
    arcpy.Union_analysis(in_features=Input_Geologic_Features,
                         out_feature_class=Output_Union,
                         join_attributes="ALL",
                         cluster_tolerance="",
                         gaps="GAPS")

    # Process: Multipart To Singlepart (Multipart To Singlepart)
    Output_Singlepart = ""
    arcpy.MultipartToSinglepart_management(
        in_features=Output_Union, out_feature_class=Output_Singlepart)

    # Process: Aggregate Polygons (Aggregate Polygons)
    Output_Aggregate = ""
    Output_Table = ""  # fix: was undefined (NameError at call time)
    arcpy.AggregatePolygons_cartography(
        in_features=Output_Singlepart,
        out_feature_class=Output_Aggregate,
        aggregation_distance=Aggregation_Distance,
        minimum_area=Minimum_Area,
        minimum_hole_size=Minimum_Hole_Size,
        orthogonality_option=Preserve_orthogonal_shape,
        barrier_features=Barrier_Features,
        out_table=Output_Table)

    # Process: Simplify Polygon (Simplify Polygon)
    Output_Simplify = ""
    output_feature_class_Pnt = \
        arcpy.SimplifyPolygon_cartography(in_features=Output_Aggregate,
                                          out_feature_class=Output_Simplify,
                                          algorithm=Simplification_Algorithm,
                                          tolerance=Simplification_Tolerance,
                                          minimum_area=Minimum_Area_2_,
                                          error_option=Handling_Topological_Errors,
                                          collapsed_point_option=Keep_collapsed_points,
                                          in_barriers=Input_Barrier_Layers)[0]

    # Process: Smooth Polygon (Smooth Polygon)
    Output_Smooth = ""
    arcpy.SmoothPolygon_cartography(
        in_features=Output_Simplify,
        out_feature_class=Output_Smooth,
        algorithm=Smoothing_Algorithm,
        tolerance=Smoothing_Tolerance,
        endpoint_option=Preserve_endpoint_for_rings,
        error_option=Handling_Topological_Errors_2_,
        in_barriers=Input_Barrier_Layers_2_)

    # Process: Buffer (Buffer)
    Output_Buffer = ""
    arcpy.Buffer_analysis(in_features=Output_Smooth,
                          out_feature_class=Output_Buffer,
                          buffer_distance_or_field=Distance_value_or_field_,
                          line_side=Side_Type,
                          line_end_type=End_Type,
                          dissolve_option=Dissolve_Type,
                          dissolve_field=Dissolve_Field_s_,
                          method=Method)

    # Process: Eliminate Polygon Part (Eliminate Polygon Part)
    arcpy.EliminatePolygonPart_management(
        in_features=Output_Buffer,
        out_feature_class=Output_Finished,
        condition=Condition,
        part_area=Area,
        part_area_percent=Percentage,
        part_option=Eliminate_contained_parts_only)
# Calculate raster mean within a 50 x 50 cell window
# (the original comment said "1.5 km grid" -- true only for 30 m cells)
arcpy.AddMessage("Calculating spatial certainty of sample representation...")
sample_zonal = FocalStatistics(sample_nowater,
                               NbrRectangle(50, 50, "CELL"),
                               "MEAN", "DATA")
extract_zonal = ExtractByMask(sample_zonal, area_of_interest)
arcpy.CopyRaster_management(extract_zonal, spatial_certainty, "", "", -9999,
                            "NONE", "NONE", "32_BIT_FLOAT", "NONE", "NONE",
                            "TIFF", "NONE")

# Resample spatial certainty to 1 km grid
arcpy.AddMessage("Resampling spatial certainty to 1 km grid...")
arcpy.Resample_management(spatial_certainty, certainty_resample, "1000",
                          "BILINEAR")

# Set the values below a threshold to null
arcpy.AddMessage("Converting spatial certainty to study area raster...")
# fix: divide by 100.0 -- under Python 2 the old integer division truncated
# any threshold below 100 to 0, disabling the cutoff entirely; on Python 3
# this is behaviorally identical to /100
threshold = int(threshold) / 100.0
resample_null = SetNull(certainty_resample, 1, "VALUE < %f" % threshold)

# Convert raster to polygon
arcpy.AddMessage("Converting raster to polygon...")
arcpy.RasterToPolygon_conversion(resample_null, initial_studyarea, "SIMPLIFY",
                                 "VALUE", "SINGLE_OUTER_PART", "")

# Simplify the polygon (tolerance 1000 and minimum area 1000000000, both in
# the dataset's units since no unit strings are given)
arcpy.AddMessage("Simplifying study area polygon...")
arcpy.SimplifyPolygon_cartography(initial_studyarea, simplify_studyarea,
                                  "POINT_REMOVE", 1000, 1000000000, "",
                                  "NO_KEEP", "")

# Smooth the polygon
arcpy.AddMessage("Smoothing study area polygon...")
arcpy.SmoothPolygon_cartography(simplify_studyarea, study_area, "PAEK", 50000,
                                "FIXED_ENDPOINT", "NO_CHECK")

# Delete intermediate files
arcpy.Delete_management(certainty_resample)
arcpy.Delete_management(initial_studyarea)
arcpy.Delete_management(simplify_studyarea)
break # Delete shapefile if indicated if Delete == True: arcpy.Delete_management(inShp) # Close zipfile object ZIP.close() # Return zipfile full path return zipfl arcpy.SimplifyPolygon_cartography(floodboolpoly, 'def1in100_45_boolpoly_simpl.shp', algorithm='BEND_SIMPLIFY', tolerance='20 meters', collapsed_point_option='NO_KEEP') #Create tile layers arcpy.CreateMapTilePackage_management(floodboolpoly, ...) #FATHOM flood zone arcpy.CreateMapTilePackage_management( 'parceltile.mxd', service_type='ONLINE', output_file=os.path.split(parcel_flood_attri)[1] + '_tile', format_type='PNG', level_of_detail=20) #Parcel dataset #Add fields to census tracts computing number of residents living within flood zone for each race/ethnicity nflist = [ 'tract_{}_floodpop'.format(i) for i in ['hisp', 'black', 'white', 'asi']
# get script location
pyScript = sys.argv[0]
toolDir = os.path.dirname(pyScript)
# fix: join with a path separator (was `toolDir + "TBD.r"`, which glued the
# filename onto the directory name)
rScriptPath = os.path.join(toolDir, "TBD.r")  # Name of script TBD

# Subprocess args for the R batch call
rCMD = "R --slave --vanilla --args"
# fix: was `args = .join([...])` (SyntaxError); space-separate the values
# and keep a leading space so they do not fuse with "--args"
args = " " + " ".join([workspace, trainingPoints, outputFC])

# command to call R (redirect the script into R's stdin)
cmd = rCMD + args + " < " + rScriptPath

# Execute command
# NOTE(review): os.system with interpolated paths breaks on spaces/quoting;
# consider subprocess.run([...]) once the real script name is known.
os.system(cmd)

# project output
sr = arcpy.SpatialReference("TBD")  # Projection TBD
arcpy.management.DefineProjection(outputFC, sr)

params = arcpy.gp.GetParameterInfo()
#renderFile = os.path.join(toolDir, "TBD.shp")  # output name
params[2].Symbology = outputFC

# create geodatabase and feature classes
db = arcpy.CreateFileGDB_management(workspace, "BEC_Automation.gdb")
# fix: CreateFileGDB returns a Result object, which does not support `+`;
# take its path explicitly. Also fixes the tool-name casing
# (FeatureClasstoFeatureClass -> FeatureClassToFeatureClass).
gdbPath = str(db)
arcpy.FeatureClassToFeatureClass_conversion(outputFC, gdbPath, "orig_BEC")

# tolerance and minimum area must still be determined
# NOTE(review): TBD is an undefined placeholder name -- this line raises
# NameError until real values are supplied.
arcpy.SimplifyPolygon_cartography(outputFC, os.path.join(gdbPath, "simp_BEC"),
                                  "BEND_SIMPLIFY", TBD, TBD,
                                  "RESOLVE_ERRORS", "NO_KEEP")
def createBoundaryFeatureClass(raster_footprint, target_raster_boundary, statistics_fields="", alter_field_infos=None):
    """Derive a clean dataset boundary polygon from raster footprints.

    Pipeline: multipart->singlepart, buffer out 10 m, dissolve (optionally
    carrying summary statistics), eliminate small interior parts,
    bend-simplify (20 m), then buffer back in 10 m so the final boundary
    hugs the data again. Intermediate feature classes are named
    <target>0..<target>4 and removed as soon as they are consumed.

    raster_footprint       -- input footprint feature class
    target_raster_boundary -- output boundary feature class (recreated)
    statistics_fields      -- field/statistic spec forwarded to Dissolve
    alter_field_infos      -- optional [old, new, alias] triples renamed on
                              the dissolve result and re-joined at the end
    """
    a = datetime.datetime.now()
    aa = a  # overall start time, used for the final elapsed-time message
    deleteFields(raster_footprint)
    lasd_boundary_0 = "{}0".format(target_raster_boundary)
    lasd_boundary_1 = "{}1".format(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_0, True)
    deleteFileIfExists(lasd_boundary_1, True)

    arcpy.AddMessage("\tMultipart to Singlepart")
    arcpy.MultipartToSinglepart_management(in_features=raster_footprint,
                                           out_feature_class=lasd_boundary_0)
    Utility.addToolMessages()
    arcpy.RepairGeometry_management(in_features=lasd_boundary_0,
                                    delete_null="DELETE_NULL")
    deleteFields(lasd_boundary_0)

    # Buffer outward 10 m -- presumably so adjacent footprints merge when
    # dissolved; mirrored by the -10 m buffer at the end
    arcpy.AddMessage("\tBuffering")
    arcpy.Buffer_analysis(in_features=lasd_boundary_0,
                          out_feature_class=lasd_boundary_1,
                          buffer_distance_or_field="10 Meters",
                          line_side="FULL",
                          line_end_type="ROUND",
                          dissolve_option="NONE",
                          method="PLANAR")
    Utility.addToolMessages()
    arcpy.RepairGeometry_management(in_features=lasd_boundary_1,
                                    delete_null="DELETE_NULL")
    deleteFields(lasd_boundary_1)
    deleteFileIfExists(lasd_boundary_0, True)

    lasd_boundary_2 = "{}2".format(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_2, True)
    arcpy.AddMessage("\tDissolving with statistics: {}".format(statistics_fields))
    arcpy.Dissolve_management(
        in_features=lasd_boundary_1,
        out_feature_class=lasd_boundary_2,
        statistics_fields=statistics_fields
    )
    Utility.addToolMessages()
    arcpy.RepairGeometry_management(in_features=lasd_boundary_2,
                                    delete_null="DELETE_NULL")
    deleteFields(lasd_boundary_2)
    a = doTime(a, "\tDissolved to {}".format(lasd_boundary_2))

    if alter_field_infos is not None:
        for alter_field_info in alter_field_infos:
            try:
                alterField(lasd_boundary_2, alter_field_info[0],
                           alter_field_info[1], alter_field_info[2])
            except:
                # best effort: a field that cannot be renamed is left as-is
                pass
        a = doTime(a, "\tRenamed summary fields")

    lasd_boundary_3 = "{}3".format(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_3, True)
    # Drop contained interior parts smaller than 10000 square miles
    arcpy.EliminatePolygonPart_management(in_features=lasd_boundary_2,
                                          out_feature_class=lasd_boundary_3,
                                          condition="AREA",
                                          part_area="10000 SquareMiles",
                                          part_area_percent="0",
                                          part_option="CONTAINED_ONLY")
    arcpy.RepairGeometry_management(in_features=lasd_boundary_3,
                                    delete_null="DELETE_NULL")
    deleteFileIfExists(lasd_boundary_1, True)
    deleteFields(lasd_boundary_3)

    lasd_boundary_4 = "{}4".format(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_4, True)
    arcpy.SimplifyPolygon_cartography(in_features=lasd_boundary_3,
                                      out_feature_class=lasd_boundary_4,
                                      algorithm="BEND_SIMPLIFY",
                                      tolerance="20 Meters",
                                      minimum_area="0 Unknown",
                                      error_option="RESOLVE_ERRORS",
                                      collapsed_point_option="NO_KEEP",
                                      in_barriers="")
    arcpy.RepairGeometry_management(in_features=lasd_boundary_4,
                                    delete_null="DELETE_NULL")
    deleteFields(lasd_boundary_4)
    #try:
    #    arcpy.DeleteField_management(in_table=lasd_boundary_4, drop_field="Id;ORIG_FID;InPoly_FID;SimPgnFlag;MaxSimpTol;MinSimpTol")
    #except:
    #    pass
    deleteFileIfExists(lasd_boundary_3, True)

    deleteFileIfExists(target_raster_boundary, True)
    # Buffer back in by the same 10 m (dissolving all parts) to produce the
    # final boundary
    arcpy.Buffer_analysis(in_features=lasd_boundary_4,
                          out_feature_class=target_raster_boundary,
                          buffer_distance_or_field="-10 Meters",
                          line_side="FULL",
                          line_end_type="ROUND",
                          dissolve_option="ALL",
                          method="PLANAR")
    arcpy.RepairGeometry_management(in_features=target_raster_boundary,
                                    delete_null="DELETE_NULL")
    deleteFields(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_4, True)

    if alter_field_infos is not None and len(alter_field_infos) > 0:
        # Re-attach the (renamed) summary fields from the dissolve result
        fields = ";".join([field[1] for field in alter_field_infos])
        arcpy.JoinField_management(in_data=target_raster_boundary,
                                   in_field="OBJECTID",
                                   join_table=lasd_boundary_2,
                                   join_field="OBJECTID",
                                   fields=fields)
        Utility.addToolMessages()
    deleteFileIfExists(lasd_boundary_2, True)

    a = doTime(aa, "Dissolved las footprints to dataset boundary {} ".format(target_raster_boundary))
def upsertWildfireEventFeatures(self, event, report, perimeter):
    """Insert or update wildfire boundary features in portal for an event.

    Loads the event's existing boundary polygons from the portal feature
    service, dissolves each with the new perimeter and simplifies the
    result (5000 ft bend-simplify), deletes the superseded boundary rows,
    then POSTs the merged boundary back to the portal.

    Returns True on completion, False when existing boundary data cannot
    be loaded.

    Fixes relative to the previous revision:
    - deleteResponse could be referenced while unbound when portalInfo had
      no 'env' key (or an unrecognized value); it is now initialized and
      guarded;
    - portalResponse.status_code (an int) was concatenated to a str in the
      error log, raising TypeError; it is now str()-wrapped.
    """
    eventId = self.getEventId(event)
    appid = None
    wildfireQuery = self.getPortalQuery(eventId)
    if self.portalInfo['useNegotiateAuth'] == True:
        temp = requests.get(self.wildfireBoundariesUrl + wildfireQuery,
                            verify=False,
                            auth=HttpNegotiateAuth())
    else:
        temp = requests.get(self.wildfireBoundariesUrl + wildfireQuery)
    wildfireBoundariesFs = arcpy.FeatureSet()
    wildfireBoundariesFs = arcpy.AsShape(temp.content, True)
    if wildfireBoundariesFs.JSON is not None:
        self.logger.info(
            'Successfully loaded boundary data from portal for wildfire event: '
            + eventId)
        wildfireBoundariesJson = json.loads(wildfireBoundariesFs.JSON)
    else:
        self.logger.error('Unable to load boundary data from portal!')
        return False
    arcpy.Delete_management('in_memory')

    # form a polygon feature class to encompass the new boundary geometry
    new_geometry = perimeter['geometry']
    new_boundary_polygon = arcpy.Polygon(
        arcpy.Array(
            [arcpy.Point(*coords) for coords in new_geometry['rings'][0]]),
        arcpy.SpatialReference(3857))

    if len(wildfireBoundariesJson['features']) > 0:
        # iterate through each of the wildfire boundaries associated with
        # the event; typically there will only be one...
        boundaries = wildfireBoundariesJson['features']
        boundaryCount = 1
        for boundary in boundaries:
            dissolveBoundaryFC = "in_memory\\_wildfireBoundaryDissolved_featureClass_" + str(
                boundaryCount)
            simplifyBoundaryFC = "in_memory\\_wildfireBoundarySimplified_featureClass_" + str(
                boundaryCount)
            boundaryCount += 1

            # form a polygon feature class for the old boundary geometry
            old_geometry = boundary['geometry']
            old_boundary_polygon = arcpy.Polygon(
                arcpy.Array([
                    arcpy.Point(*coords)
                    for coords in old_geometry['rings'][0]
                ]), arcpy.SpatialReference(3857))

            # dissolve together the old boundary with the new boundary
            arcpy.Dissolve_management(
                [new_boundary_polygon, old_boundary_polygon],
                dissolveBoundaryFC)
            dissolveWildfireBoundaryFS = arcpy.FeatureSet()
            dissolveWildfireBoundaryFS.load(dissolveBoundaryFC)

            # simplify the merged result
            arcpy.SimplifyPolygon_cartography(dissolveWildfireBoundaryFS,
                                              simplifyBoundaryFC,
                                              'BEND_SIMPLIFY', '5000 Feet')
            simplifiedWildfireBoundaryFS = arcpy.FeatureSet()
            simplifiedWildfireBoundaryFS.load(simplifyBoundaryFC)

            if len(boundaries) > 1:
                # update the new boundary polygon with the current iteration
                # of dissolved/simplified boundary data
                newBoundaryJSON = json.loads(
                    simplifiedWildfireBoundaryFS.JSON)
                new_geometry = newBoundaryJSON['features'][0]['geometry']
                new_boundary_polygon = arcpy.Polygon(
                    arcpy.Array([
                        arcpy.Point(*coords)
                        for coords in new_geometry['rings'][0]
                    ]), arcpy.SpatialReference(3857))

            # delete any old boundary data before adding the updated data
            appid_temp = boundary['attributes']['appid']
            if appid_temp is not None:
                appid = appid_temp
            deleteResponse = None  # fix: was unbound when no env branch ran
            if 'env' in self.portalInfo:
                if self.portalInfo['env'] == 'D':
                    fid = boundary['attributes']['fid']
                    deleteResponse = requests.post(
                        self.wildfireBoundariesUrl + self.deleteFeatures,
                        data='where=fid=' + str(fid) +
                        '&geometry=&geometryType=esriGeometryEnvelope&inSR=&spatialRel=esriSpatialRelIntersects&gdbVersion=&rollbackOnFailure=true&f=json',
                        headers=self.tokenHeaders)
                elif self.portalInfo['env'] == 'S' or self.portalInfo[
                        'env'] == 'P':
                    fid = boundary['attributes']['objectid1']
                    deleteResponse = requests.post(
                        self.wildfireBoundariesUrl + self.deleteFeatures,
                        verify=False,
                        auth=HttpNegotiateAuth(),
                        data='where=objectid1=' + str(fid) +
                        '&geometry=&geometryType=esriGeometryEnvelope&inSR=&spatialRel=esriSpatialRelIntersects&gdbVersion=&rollbackOnFailure=true&f=json',
                        headers=self.tokenHeaders)
            if deleteResponse is not None and not deleteResponse.ok:
                self.logger.warn(
                    'Unable to delete old boundary data for wildfire event: '
                    + eventId)
    else:
        # no existing boundaries: simplify the new perimeter alone
        simplifyBoundaryFC = "in_memory\\_wildfireBoundarySimplified_featureClass"
        arcpy.SimplifyPolygon_cartography(new_boundary_polygon,
                                          simplifyBoundaryFC,
                                          'BEND_SIMPLIFY', '5000 Feet')
        simplifiedWildfireBoundaryFS = arcpy.FeatureSet()
        simplifiedWildfireBoundaryFS.load(simplifyBoundaryFC)

    boundaryJSON = json.loads(simplifiedWildfireBoundaryFS.JSON)
    boundary = boundaryJSON['features']
    if len(boundary) > 0:
        # insert new wildfire boundaries data
        wildfireBoundary = WildfireBoundary.WildfireBoundary()
        wildfireBoundary.consume(self.indexedEventJson, appid,
                                 report['attributes'], perimeter,
                                 boundary[0])
        # POST data to portal
        if self.portalInfo['useNegotiateAuth'] == True:
            portalResponse = requests.post(
                self.wildfireBoundariesUrl + self.addFeatures,
                data=wildfireBoundary.urlEncode(),
                headers=self.tokenHeaders,
                verify=False,
                auth=HttpNegotiateAuth())
        else:
            portalResponse = requests.post(
                self.wildfireBoundariesUrl + self.addFeatures,
                data=wildfireBoundary.urlEncode(),
                headers=self.tokenHeaders)
        if portalResponse.ok:
            responseJSON = json.loads(portalResponse.content)
            success = responseJSON['addResults'][0]['success']
            if success == True:
                self.logger.info(
                    'Wildfire boundary data added for event: ' + eventId)
            else:
                self.logger.warn(
                    'Unable to add Wildfire boundary data for event: ' +
                    eventId)
        else:
            # fix: status_code is an int; str() it before concatenation
            self.logger.error(
                'Server error (' + str(portalResponse.status_code) +
                ') occurred while adding Wildfire boundary data for event: '
                + eventId)
    return True
def createBoundaryFeatureClass(raster_footprint, target_raster_boundary, statistics_fields="", alter_field_infos=None):
    """Derive a dataset boundary polygon from raster footprints (variant).

    Pipeline: buffer out 10 m -> dissolve on the elevation-type field
    (optionally carrying summary statistics) -> rename fields -> eliminate
    small contained parts -> point-remove simplify (0.1 m) -> buffer back
    in 10 m -> re-join the summary fields. Intermediates are named
    <target>1..<target>4 and deleted as soon as they are consumed.

    raster_footprint       -- input footprint feature class
    target_raster_boundary -- output boundary feature class (recreated)
    statistics_fields      -- field/statistic spec forwarded to Dissolve
    alter_field_infos      -- optional [old, new, alias] triples applied to
                              the dissolve result and re-joined at the end
    """
    a = datetime.datetime.now()
    aa = a  # overall start time, used for the closing elapsed-time message
    raster_boundary_1 = "{}1".format(target_raster_boundary)
    deleteFileIfExists(raster_boundary_1, True)
    # Buffer outward 10 m -- mirrored by the -10 m buffer at the end
    arcpy.Buffer_analysis(in_features=raster_footprint,
                          out_feature_class=raster_boundary_1,
                          buffer_distance_or_field="10 Meters",
                          line_side="FULL",
                          line_end_type="ROUND",
                          dissolve_option="NONE",
                          method="PLANAR")
    arcpy.RepairGeometry_management(in_features=raster_boundary_1,
                                    delete_null="DELETE_NULL")
    deleteFields(raster_boundary_1)
    a = doTime(a, "\tBuffer out into {}".format(raster_boundary_1))

    raster_boundary_2 = "{}2".format(target_raster_boundary)
    deleteFileIfExists(raster_boundary_2, True)
    arcpy.AddMessage(
        "\tDissolving with statistics: {}".format(statistics_fields))
    arcpy.Dissolve_management(in_features=raster_boundary_1,
                              out_feature_class=raster_boundary_2,
                              dissolve_field=FIELD_INFO[ELEV_TYPE][0],
                              statistics_fields=statistics_fields)
    arcpy.RepairGeometry_management(in_features=raster_boundary_2,
                                    delete_null="DELETE_NULL")
    deleteFields(raster_boundary_2)
    a = doTime(a, "\tDissolved to {}".format(raster_boundary_2))
    deleteFileIfExists(raster_boundary_1, True)

    alterFields(alter_field_infos, raster_boundary_2)
    a = doTime(a, "\tAltered Fields on {}".format(raster_boundary_2))

    raster_boundary_3 = "{}3".format(target_raster_boundary)
    deleteFileIfExists(raster_boundary_3, True)
    # Drop contained interior parts smaller than 10000 square miles
    arcpy.EliminatePolygonPart_management(in_features=raster_boundary_2,
                                          out_feature_class=raster_boundary_3,
                                          condition="AREA",
                                          part_area="10000 SquareMiles",
                                          part_area_percent="0",
                                          part_option="CONTAINED_ONLY")
    arcpy.RepairGeometry_management(in_features=raster_boundary_3,
                                    delete_null="DELETE_NULL")
    deleteFields(raster_boundary_3)
    a = doTime(a,
               "\tEliminated internal parts on {}".format(raster_boundary_3))

    # Don't delete raster boundary 2 because we need it later
    # JWS 4/26 - Bend Simplify -> Point Remove & 20 Meters -> 0.1 Meters
    raster_boundary_4 = "{}4".format(target_raster_boundary)
    deleteFileIfExists(raster_boundary_4, True)
    arcpy.SimplifyPolygon_cartography(in_features=raster_boundary_3,
                                      out_feature_class=raster_boundary_4,
                                      algorithm="POINT_REMOVE",
                                      tolerance="0.1 Meters",
                                      minimum_area="0 Unknown",
                                      error_option="RESOLVE_ERRORS",
                                      collapsed_point_option="NO_KEEP",
                                      in_barriers="")
    arcpy.RepairGeometry_management(in_features=raster_boundary_4,
                                    delete_null="DELETE_NULL")
    deleteFields(raster_boundary_4)
    a = doTime(a, "\tSimplified to {}".format(raster_boundary_4))
    deleteFileIfExists(raster_boundary_3, True)

    deleteFileIfExists(target_raster_boundary, True)
    # Buffer back in by the same 10 m to produce the final boundary
    arcpy.Buffer_analysis(in_features=raster_boundary_4,
                          out_feature_class=target_raster_boundary,
                          buffer_distance_or_field="-10 Meters",
                          line_side="FULL",
                          line_end_type="ROUND",
                          dissolve_option="NONE",
                          method="PLANAR")
    arcpy.RepairGeometry_management(in_features=target_raster_boundary,
                                    delete_null="DELETE_NULL")
    deleteFields(target_raster_boundary)
    a = doTime(a, "\tBuffer back into {}".format(target_raster_boundary))
    deleteFileIfExists(raster_boundary_4, True)

    if alter_field_infos is not None and len(alter_field_infos) > 0:
        # Re-attach the (renamed) summary fields from the dissolve result
        fields = ";".join([field[1] for field in alter_field_infos])
        arcpy.JoinField_management(in_data=target_raster_boundary,
                                   in_field="OBJECTID",
                                   join_table=raster_boundary_2,
                                   join_field="OBJECTID",
                                   fields=fields)
        # Utility.addToolMessages()
        a = doTime(
            a, "\tJoined {} with {}".format(target_raster_boundary,
                                            raster_boundary_2))
    deleteFileIfExists(raster_boundary_2, True)

    a = doTime(
        aa, "Dissolved raster footprints to dataset boundary {} ".format(
            target_raster_boundary))
# simplifyOops2.py
# Purpose: Simplify polygons using ten different simplification values.
# Output: Should create 10 output shapefiles.
# (Corrected version of the intentionally buggy exercise script.)
import arcpy, os

arcpy.env.overwriteOutput = True
arcpy.env.workspace = 'C:/Users/owner/Downloads/Sample_scripts/ch07/'
outDir = 'C:/Users/owner/Downloads/Sample_scripts/ch07/chichi/'
if not os.path.exists(outDir):
    os.mkdir(outDir)

fc = 'boundingBoxes.shp'
x = 1
while x <= 10:
    try:
        # NOTE(review): fc[:-12] keeps only 'bound' of 'boundingBoxes.shp';
        # use fc[:-4] if the full base name is wanted -- kept as-is.
        output = '{0}{1}{2}Simp.shp'.format(outDir, fc[:-12], x)
        # fix: the 4th positional argument of SimplifyPolygon_cartography is
        # the simplification *tolerance* (the original mislabeled it
        # minArea), and the algorithm is named explicitly instead of
        # relying on the '#' default placeholder.
        tolerance = '{0}'.format(x)
        arcpy.SimplifyPolygon_cartography(fc, output, 'POINT_REMOVE',
                                          tolerance)
        print('Created: {0}'.format(output))
    except arcpy.ExecuteError:
        print(arcpy.GetMessages())
    # fix: increment at loop level, outside try/except, so the loop always
    # advances and cannot spin forever on a repeated tool failure
    x = x + 1
def createQARasterMosaics(isClassified, gdb_path, spatial_reference, target_folder, mxd, footprint_path=None, lasd_boundary_path=None):
    """Create QA mosaic datasets for every stats-method / dataset-name combo.

    For each method in STATS_METHODS and each name in DATASET_NAMES, builds
    a mosaic dataset from the matching raster folder, attaching simplified
    copies of the footprint and LAS-dataset boundary (created lazily, once,
    and cleaned up at the end). Also appends the CANOPY_DENSITY mosaic,
    creating it as a referenced mosaic of POINT_COUNT_ALL when it does not
    exist yet.

    Returns the list of created mosaics (entries are whatever
    createQARasterMosaicDataset returns, plus one [path, name] pair).
    """
    mosaics = []
    # simplified footprint/boundary are created on first use and shared
    # across all iterations
    simple_footprint_path = None
    simple_lasd_boundary_path = None
    stats_methods = STATS_METHODS
    for method in stats_methods:
        arcpy.AddMessage("Creating {} MDS".format(method))
        for dataset_name in DATASET_NAMES:
            name = dataset_name
            if not isClassified:
                # Using a generic name for non-classified data
                name = ""
            md_name = method
            if len(name) > 0:
                md_name = "{}{}".format(method, name)
            # NOTE(review): name[1:] drops the first character -- presumably
            # DATASET_NAMES entries carry a leading separator; confirm.
            input_folder = os.path.join(target_folder, method, name[1:])
            arcpy.AddMessage("Creating {} MD from {}".format(md_name, input_folder))
            try:
                if simple_footprint_path is None:
                    simple_footprint_path = "{}_Simple".format(footprint_path)
                    arcpy.SimplifyPolygon_cartography(in_features=footprint_path,
                                                      out_feature_class=simple_footprint_path,
                                                      algorithm="POINT_REMOVE",
                                                      tolerance=Raster.boundary_interval,
                                                      minimum_area="0 SquareMeters",
                                                      error_option="RESOLVE_ERRORS",
                                                      collapsed_point_option="NO_KEEP")
                    Utility.addToolMessages()
                    deleteFields(simple_footprint_path)
                    #try:
                    #    arcpy.DeleteField_management(in_table=simple_footprint_path, drop_field="Id;ORIG_FID;InPoly_FID;SimPgnFlag;MaxSimpTol;MinSimpTol")
                    #except:
                    #    pass
                if simple_lasd_boundary_path is None:
                    simple_lasd_boundary_path = "{}_Simple".format(lasd_boundary_path)
                    arcpy.SimplifyPolygon_cartography(in_features=lasd_boundary_path,
                                                      out_feature_class=simple_lasd_boundary_path,
                                                      algorithm="POINT_REMOVE",
                                                      tolerance=Raster.boundary_interval,
                                                      minimum_area="0 SquareMeters",
                                                      error_option="RESOLVE_ERRORS",
                                                      collapsed_point_option="NO_KEEP")
                    Utility.addToolMessages()
                    deleteFields(simple_lasd_boundary_path)
                    #try:
                    #    arcpy.DeleteField_management(in_table=simple_lasd_boundary_path, drop_field="Id;ORIG_FID;InPoly_FID;SimPgnFlag;MaxSimpTol;MinSimpTol")
                    #except:
                    #    pass
            except:
                # best effort: mosaic creation proceeds without the
                # simplified layers if simplification fails
                arcpy.AddWarning("Failed to create simplified footprints and boundaries in '{}'".format(gdb_path))
            qa_md = createQARasterMosaicDataset(md_name, gdb_path,
                                                spatial_reference, input_folder, mxd,
                                                simple_footprint_path,
                                                simple_lasd_boundary_path)
            if qa_md is not None:
                mosaics.append(qa_md)

    md_name = CANOPY_DENSITY
    dhm_md_path = os.path.join(gdb_path, md_name)
    mosaics.append([dhm_md_path, md_name])
    if arcpy.Exists(dhm_md_path):
        arcpy.AddMessage("{} already exists.".format(md_name))
    else:
        try:
            vert_cs_name, vert_unit_name = Utility.getVertCSInfo(spatial_reference)  # @UnusedVariable
            # No need to update boundary and footprints since it will inherit from the original
            pc_all_md_path = os.path.join(gdb_path, "POINT_COUNT_ALL")
            createReferenceddMosaicDataset(pc_all_md_path, dhm_md_path, spatial_reference, vert_unit_name)
        except:
            arcpy.AddMessage("Failed to create {}".format(dhm_md_path))

    # remove the shared simplified layers now that all mosaics are built
    deleteFileIfExists(simple_footprint_path, True)
    deleteFileIfExists(simple_lasd_boundary_path, True)
    return mosaics
# boundline_model_script.py # Created on: 2015-02-04 18:32:26.00000 # (generated by ArcGIS/ModelBuilder) # Description: # --------------------------------------------------------------------------- # Set the necessary product code # import arcinfo # Import arcpy module import arcpy arcpy.env.overwriteOutput = 1 # Important for debugging; won't freak out about files existing already. # Local variables: fileInput = arcpy.GetParameterAsText(0) boundline = arcpy.GetParameterAsText(1) simpleFile = arcpy.GetParameterAsText(2) spatial_ref = arcpy.Describe(fileInput).spatialReference vertices = arcpy.GetParameterAsText(3) #vertices = "vertices" # Simplify dataset arcpy.SimplifyPolygon_cartography(fileInput,simpleFile,"POINT_REMOVE",50,"","RESOLVE_ERRORS") # Get vertices for dataset arcpy.FeatureVerticesToPoints_management(fileInput, vertices, "ALL") # Create bound using TIN #arcpy.CreateTin_3d(bound_tin, spatial_ref, "vertices, <None>, hardclip")
# Process: rename building_w_single arcpy.CalculateValue_management( "ElectionDistrict (\"%ElectionDistrict%\")", "def ElectionDistrict (ElectionDistrict):\\n outputName=\"\"\"building_address_w_single_ED_%ElectionDistrict%\"\"\"\\n return outputName", "String") # Process: Feature Class to Feature Class (4) arcpy.FeatureClassToFeatureClass_conversion( building_address_w_single__3_, split, output_value, "", "Join_Count \"Join_Count\" true true false 4 Long 0 0 ,First,#,C:\\LocalGIS\\OSM\\OSM-BaCo.gdb\\building_address_w_single,Join_Count,-1,-1;TARGET_FID \"TARGET_FID\" true true false 4 Long 0 0 ,First,#,C:\\LocalGIS\\OSM\\OSM-BaCo.gdb\\building_address_w_single,TARGET_FID,-1,-1;BUILDING \"FEATURE_TYPE\" true true false 50 Text 0 0 ,First,#,C:\\LocalGIS\\OSM\\OSM-BaCo.gdb\\building_address_w_single,BUILDING,-1,-1;ADDR_HOUSE \"ADDR_HOUSENUMBER\" true true false 21 Text 0 0 ,First,#,C:\\LocalGIS\\OSM\\OSM-BaCo.gdb\\building_address_w_single,ADDR_HOUSENUMBER,-1,-1;ADDR_STREE \"ADDR_STREET\" true true false 169 Text 0 0 ,First,#,C:\\LocalGIS\\OSM\\OSM-BaCo.gdb\\building_address_w_single,ADDR_STREET,-1,-1;ADDR_STATE \"ADDR_STATE\" true true false 2 Text 0 0 ,First,#,C:\\LocalGIS\\OSM\\OSM-BaCo.gdb\\building_address_w_single,ADDR_STATE,-1,-1;ADDR_CITY \"ADDR_CITY\" true true false 40 Text 0 0 ,First,#,C:\\LocalGIS\\OSM\\OSM-BaCo.gdb\\building_address_w_single,ADDR_CITY,-1,-1;SHAPE_Leng \"SHAPE_Leng\" false true true 8 Double 0 0 ,First,#,C:\\LocalGIS\\OSM\\OSM-BaCo.gdb\\building_address_w_single,SHAPE_Length,-1,-1;SHAPE_Area \"SHAPE_Area\" false true true 8 Double 0 0 ,First,#,C:\\LocalGIS\\OSM\\OSM-BaCo.gdb\\building_address_w_single,SHAPE_Area,-1,-1;POSTCODE \"POSTCODE\" true true false 50 Text 0 0 ,First,#,C:\\LocalGIS\\OSM\\OSM-BaCo.gdb\\building_address_w_single,POSTCODE,-1,-1", "") # Process: Simplify Polygon arcpy.SimplifyPolygon_cartography(building_address_w_single_ED_10_shp, building_address_w_single_ED1, "POINT_REMOVE", Linear_unit, "0 Unknown", "NO_CHECK", 
"NO_KEEP") # Process: Project arcpy.Project_management( building_address_w_single_ED1, building_address_w_single_Si1, Coordinate_System, "NAD_1983_HARN_To_WGS_1984_2", "PROJCS['NAD_1983_HARN_StatePlane_Maryland_FIPS_1900_Feet',GEOGCS['GCS_North_American_1983_HARN',DATUM['D_North_American_1983_HARN',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['False_Easting',1312333.333333333],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-77.0],PARAMETER['Standard_Parallel_1',38.3],PARAMETER['Standard_Parallel_2',39.45],PARAMETER['Latitude_Of_Origin',37.66666666666666],UNIT['Foot_US',0.3048006096012192]]" ) # Process: Feature Class to Feature Class arcpy.FeatureClassToFeatureClass_conversion( building_address_w_single_Si1, projected, output_value, "", "Join_Count \"Join_Count\" true true false 4 Long 0 0 ,First,#,C:\\Users\\eplack\\Documents\\ArcGIS\\Default.gdb\\building_address_w_single_Si1,Join_Count,-1,-1;TARGET_FID \"TARGET_FID\" true true false 4 Long 0 0 ,First,#,C:\\Users\\eplack\\Documents\\ArcGIS\\Default.gdb\\building_address_w_single_Si1,TARGET_FID,-1,-1;BUILDING \"BUILDING\" true true false 50 Text 0 0 ,First,#,C:\\Users\\eplack\\Documents\\ArcGIS\\Default.gdb\\building_address_w_single_Si1,BUILDING,-1,-1;ADDR_HOUSE \"ADDR_HOUSE\" true true false 21 Text 0 0 ,First,#,C:\\Users\\eplack\\Documents\\ArcGIS\\Default.gdb\\building_address_w_single_Si1,ADDR_HOUSE,-1,-1;ADDR_STREE \"ADDR_STREE\" true true false 169 Text 0 0 ,First,#,C:\\Users\\eplack\\Documents\\ArcGIS\\Default.gdb\\building_address_w_single_Si1,ADDR_STREE,-1,-1;ADDR_STATE \"ADDR_STATE\" true true false 2 Text 0 0 ,First,#,C:\\Users\\eplack\\Documents\\ArcGIS\\Default.gdb\\building_address_w_single_Si1,ADDR_STATE,-1,-1;ADDR_CITY \"ADDR_CITY\" true true false 40 Text 0 0 ,First,#,C:\\Users\\eplack\\Documents\\ArcGIS\\Default.gdb\\building_address_w_single_Si1,ADDR_CITY,-1,-1;SHAPE_Leng 
\"SHAPE_Leng\" true true false 8 Double 0 0 ,First,#,C:\\Users\\eplack\\Documents\\ArcGIS\\Default.gdb\\building_address_w_single_Si1,SHAPE_Leng,-1,-1;POSTCODE \"POSTCODE\" true true false 50 Text 0 0 ,First,#,C:\\Users\\eplack\\Documents\\ArcGIS\\Default.gdb\\building_address_w_single_Si1,POSTCODE,-1,-1;Shape_Le_1 \"Shape_Le_1\" true true true 8 Double 0 0 ,First,#,C:\\Users\\eplack\\Documents\\ArcGIS\\Default.gdb\\building_address_w_single_Si1,Shape_Length,-1,-1,C:\\Users\\eplack\\Documents\\ArcGIS\\Default.gdb\\building_address_w_single_Si1,Shape_length,-1,-1;Shape_Area \"Shape_Area\" true true true 8 Double 0 0 ,First,#,C:\\Users\\eplack\\Documents\\ArcGIS\\Default.gdb\\building_address_w_single_Si1,Shape_Area,-1,-1,C:\\Users\\eplack\\Documents\\ArcGIS\\Default.gdb\\building_address_w_single_Si1,Shape_area,-1,-1", "")
def simplify(self):
    """Run the 25K->50K generalization pipeline on the configured geodatabases.

    Driven by a ConfigSimplify.json file located next to this script. For every
    layer whose config entry has RunStatus == "True":
      1. polygons (and right-side 0.1 m buffers of most polylines) are tagged with
         a per-layer FID field, merged, and run through SimplifyPolygon;
      2. polylines are tagged, merged, and run through SimplifyLine;
      3. simplified shapes are written back into the source feature classes by
         matching the FID tag (rows with no simplified match are deleted);
      4. lines are snapped back to their buffer polygons, plus extra snap/
         integrate/eliminate repair around PhuBeMat when the optional layers exist;
      5. results are copied from the process GDB to the final GDB.

    Paths are hard-coded to C:/Generalize_25_50/*.gdb. All scratch data goes to
    the in_memory workspace, which is deleted in the finally block.

    NOTE(review): Python 2 only — uses the `<>` operator and `error.message`.
    Vietnamese identifiers: duongDanNguon = source path, duongDanDich =
    destination path.  AddMessage strings are Vietnamese progress messages and
    are runtime output, left untouched.
    """
    try:
        # Init workspace.
        # arcpy.env.overwriteOutput = 1
        duongDanNguon = "C:/Generalize_25_50/50K_Process.gdb"   # process (source) GDB
        duongDanDich = "C:/Generalize_25_50/50K_Final.gdb"      # final (destination) GDB
        urlFile = '/ConfigSimplify.json'                        # resolved relative to this script below
        # Shared simplification parameters for both polygon and line passes.
        _algorithm = "BEND_SIMPLIFY"
        _tolerance = "50 Meters"
        _error_option = "NO_CHECK"
        _collapsed_point_option = "NO_KEEP"
        # True only when both optional hydro/land-cover layers exist; gates the
        # PhuBeMat_Full / SongSuoiL_KenhMuongL_SnapPBM handling throughout.
        _checkExitLayer = False
        if arcpy.Exists(duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM") and arcpy.Exists(duongDanNguon + "/PhuBeMat/PhuBeMat_Full"):
            #arcpy.CopyFeatures_management(duongDanNguon + "/PhuBeMat/PhuBeMat_LocMatNuoc", duongDanNguon + "/PhuBeMat/PhuBeMat")
            _checkExitLayer = True
        # Read the config file (ConfigSimplify.json in this script's directory).
        s1 = inspect.getfile(inspect.currentframe())
        s2 = os.path.dirname(s1)
        urlFile = s2 + urlFile
        arcpy.AddMessage("\n# Doc file cau hinh: \"{0}\"".format(urlFile))
        if os.path.exists(urlFile):
            # listLayerConfig: list of dicts with at least LayerType, LayerName,
            # DatasetName, RunStatus keys (per the accesses below).
            fileConfig = open(urlFile)
            listLayerConfig = json.load(fileConfig)
            fileConfig.close()
            ############################### Simplify Polygon ########################################
            arcpy.Integrate_management([[duongDanNguon + "/PhuBeMat/PhuBeMat", 1]], "2 Meters")
            arcpy.AddMessage("\n# Bat dau Simplify Polygon")
            listPolygon = []
            fieldMappings = arcpy.FieldMappings()
            enableFields = []   # FID_* fields to keep in the merged output
            inputsMerge = []
            # Build the polygon work list: real polygon layers, plus thin buffers
            # of polylines (except contour lines "DuongBinhDo") so lines take part
            # in the polygon simplification and can be snapped back afterwards.
            for objConfig in listLayerConfig:
                if objConfig["LayerType"] == "Polygon" and objConfig["RunStatus"] == "True":
                    # Skip PhuBeMat_Full unless the optional layers are present.
                    if not(_checkExitLayer == False and objConfig["LayerName"] == "PhuBeMat_Full"):
                        temp = {
                            "LayerType": objConfig["LayerType"],
                            "DatasetName": objConfig["DatasetName"],
                            "LayerName": objConfig["LayerName"],
                            "featureLayer": "in_memory\\" + objConfig["LayerName"] + "_Layer",
                            "featureCopy": "in_memory\\" + objConfig["LayerName"] + "_Copy",
                            "featureCopyLayer": "in_memory\\" + objConfig["LayerName"] + "_Copy_Layer",
                            "FID_XXX": "FID_" + objConfig["LayerName"]  # per-layer join key field
                        }
                        listPolygon.append(temp)
                elif objConfig["LayerType"] == "Polyline" and objConfig["RunStatus"] == "True" and objConfig["LayerName"] <> "DuongBinhDo":
                    if not(_checkExitLayer == False and objConfig["LayerName"] == "SongSuoiL_KenhMuongL_SnapPBM"):
                        arcpy.AddMessage("\n# Buffer lop: \"{0}\"".format(objConfig["LayerName"]))
                        layerPath = duongDanNguon + "/" + objConfig["DatasetName"] + "/" + objConfig["LayerName"]
                        # Right-side 0.1 m buffer written next to the source layer.
                        arcpy.Buffer_analysis(in_features = layerPath, out_feature_class = layerPath + "_Buffer", buffer_distance_or_field = "0.1 Meters", line_side = "RIGHT")
                        temp = {
                            # NOTE(review): LayerType stays "Polyline" here, which is
                            # how the snap step below finds these buffer entries.
                            "LayerType": objConfig["LayerType"],
                            "DatasetName": objConfig["DatasetName"],
                            "LayerName": objConfig["LayerName"] + "_Buffer",
                            "featureLayer": "in_memory\\" + objConfig["LayerName"] + "_Buffer_Layer",
                            "featureCopy": "in_memory\\" + objConfig["LayerName"] + "_Buffer_Copy",
                            "featureCopyLayer": "in_memory\\" + objConfig["LayerName"] + "_Buffer_Copy_Layer",
                            "FID_XXX": "FID_" + objConfig["LayerName"]
                        }
                        listPolygon.append(temp)
            # Tag each layer's rows with their own OID in FID_<layer>, then copy
            # to in_memory for the merge.
            for element in listPolygon:
                arcpy.AddMessage("\n# Xu ly lop: {0}".format(element["LayerName"]))
                layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                arcpy.MakeFeatureLayer_management(layerPath, element["featureLayer"])
                arcpy.AddField_management(element["featureLayer"], element["FID_XXX"], "LONG")
                with arcpy.da.UpdateCursor(element["featureLayer"], ["OID@", element["FID_XXX"]]) as cursor:
                    for row in cursor:
                        row[1] = row[0]   # remember the source OID for write-back
                        cursor.updateRow(row)
                arcpy.CopyFeatures_management(layerPath, element["featureCopy"])
                arcpy.MakeFeatureLayer_management(element["featureCopy"], element["featureCopyLayer"])
                ## Field Mappings ##
                enableFields.append(element["FID_XXX"])
                fieldMappings.addTable(element["featureCopyLayer"])
                inputsMerge.append(element["featureCopyLayer"])
            # Strip every field except the FID_* join keys from the merge output.
            for field in fieldMappings.fields:
                if field.name not in enableFields:
                    fieldMappings.removeFieldMap(fieldMappings.findFieldMapIndex(field.name))
            ## Merge ##
            arcpy.AddMessage("\n# Merge Polygon...")
            outPathMerge = "in_memory\\outPathMergeTemp"
            #outPathMerge = "C:/Generalize_25_50/50K_Process.gdb/DanCuCoSoHaTang/outPathMergeTemp"
            arcpy.Merge_management (inputsMerge, outPathMerge, fieldMappings)
            ## Simplify Polygon ##
            arcpy.AddMessage("\n# Simplify Polygon...")
            outPathSimplify = "in_memory\\outPathSimplifyTemp"
            #outPathSimplify = "C:/Generalize_25_50/50K_Process.gdb/DanCuCoSoHaTang/outPathSimplifyTemp"
            # Simplify all layers in one pass so shared boundaries move together.
            arcpy.SimplifyPolygon_cartography(in_features = outPathMerge, out_feature_class = outPathSimplify, algorithm = _algorithm, tolerance = _tolerance, minimum_area = "0 SquareMeters", error_option = _error_option, collapsed_point_option = _collapsed_point_option)
            ## MakeLayerFeature ##
            outPathSimplifyLayer = "in_memory\\outPathSimplifyTempLayer"
            arcpy.MakeFeatureLayer_management(outPathSimplify, outPathSimplifyLayer)
            ## Update Shape Feature Class ##
            arcpy.AddMessage("\n# Update Shape Feature Class:")
            # Write simplified geometry back into each source layer, matching
            # source OID against the FID_* tag carried through the merge.
            for element in listPolygon:
                arcpy.AddMessage("\n\t# Update {0}...".format(element["LayerName"]))
                ### MakeLayerFeature ###
                layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                arcpy.MakeFeatureLayer_management(layerPath, element["featureLayer"])
                ### Select ###
                # Rows that came from this layer are the ones whose FID_* is set.
                strQuery = element["FID_XXX"] + " IS NOT NULL"
                arcpy.SelectLayerByAttribute_management(outPathSimplifyLayer, "NEW_SELECTION", strQuery)
                ### Copy To Table Temp ###
                outTableTemp = "in_memory\\outTableTemp"
                arcpy.CopyFeatures_management(outPathSimplifyLayer, outTableTemp)
                ### Replace shapes; delete source rows with no simplified match ###
                # NOTE(review): O(n*m) nested cursor scan; matched temp rows are
                # deleted to shrink the inner scan as it proceeds.
                with arcpy.da.UpdateCursor(element["featureLayer"], ["OID@", "SHAPE@"]) as cursor:
                    for row in cursor:
                        found = False
                        with arcpy.da.UpdateCursor(outTableTemp, [element["FID_XXX"], "SHAPE@"]) as cursorSub:
                            for rowSub in cursorSub:
                                if row[0] == rowSub[0]:
                                    found = True
                                    row[1] = rowSub[1]
                                    cursor.updateRow(row)
                                    cursorSub.deleteRow()
                                    break
                        if found == False:
                            cursor.deleteRow()
            arcpy.AddMessage("\n# Hoan thanh Simplify Polygon!!!")
            ############################################## Simplify Line #############################
            # Same tag/merge/simplify/write-back scheme as the polygon pass, but on
            # the original polylines (including DuongBinhDo this time).
            arcpy.AddMessage("\n# Bat dau Simplify Line")
            listPolyLine = []
            fieldMappingLine = arcpy.FieldMappings()
            enableFieldLine = []
            inputsMergeLine = []
            for objConfig in listLayerConfig:
                if objConfig["LayerType"] == "Polyline" and objConfig["RunStatus"] == "True":
                    if not(_checkExitLayer == False and objConfig["LayerName"] == "SongSuoiL_KenhMuongL_SnapPBM"):
                        temp = {
                            "LayerType": objConfig["LayerType"],
                            "DatasetName": objConfig["DatasetName"],
                            "LayerName": objConfig["LayerName"],
                            "featureLayer": "in_memory\\" + objConfig["LayerName"] + "_Layer",
                            "featureCopy": "in_memory\\" + objConfig["LayerName"] + "_Copy",
                            "featureCopyLayer": "in_memory\\" + objConfig["LayerName"] + "_Copy_Layer",
                            "FID_XXX": "FID_" + objConfig["LayerName"]
                        }
                        listPolyLine.append(temp)
            for element in listPolyLine:
                arcpy.AddMessage("\n# Xu ly lop: {0}".format(element["LayerName"]))
                layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                if element["LayerName"] == "DuongBinhDo":
                    # Contour lines: preserve the original OBJECTID in a new field
                    # before simplification renumbers features.
                    arcpy.AddField_management(layerPath, "OLD_OBJECTID", "LONG", None, None, None,"OLD_OBJECTID", "NULLABLE")
                    arcpy.CalculateField_management(layerPath, "OLD_OBJECTID", "!OBJECTID!", "PYTHON_9.3")
                arcpy.MakeFeatureLayer_management(layerPath, element["featureLayer"])
                arcpy.AddField_management(element["featureLayer"], element["FID_XXX"], "LONG")
                with arcpy.da.UpdateCursor(element["featureLayer"], ["OID@", element["FID_XXX"]]) as cursor:
                    for row in cursor:
                        row[1] = row[0]
                        cursor.updateRow(row)
                arcpy.CopyFeatures_management(layerPath, element["featureCopy"])
                arcpy.MakeFeatureLayer_management(element["featureCopy"], element["featureCopyLayer"])
                ## Field Mappings ##
                enableFieldLine.append(element["FID_XXX"])
                fieldMappingLine.addTable(element["featureCopyLayer"])
                inputsMergeLine.append(element["featureCopyLayer"])
            for field in fieldMappingLine.fields:
                if field.name not in enableFieldLine:
                    fieldMappingLine.removeFieldMap(fieldMappingLine.findFieldMapIndex(field.name))
            ## Merge ##
            arcpy.AddMessage("\n# Merge Polyline...")
            outPathMerge = "in_memory\\outPathMergeTemp"
            arcpy.Merge_management (inputsMergeLine, outPathMerge, fieldMappingLine)
            ## Simplify Polyline ##
            arcpy.AddMessage("\n# Simplify Polyline...")
            outPathSimplify = "in_memory\\outPathSimplifyTemp"
            # NOTE(review): the bare string below is disabled barrier-layer setup,
            # kept verbatim (it is an expression statement, not a comment).
            '''
            arcpy.MakeFeatureLayer_management(duongDanNguon + "/ThuyHe/SongSuoiA", "ThuyHe_SongSuoiA_Lyr")
            arcpy.MakeFeatureLayer_management(duongDanNguon + "/ThuyHe/MatNuocTinh", "ThuyHe_MatNuocTinh_Lyr")
            arcpy.MakeFeatureLayer_management(duongDanNguon + "/ThuyHe/KenhMuongA", "ThuyHe_KenhMuongA_Lyr")
            in_barriers_Line = ["ThuyHe_SongSuoiA_Lyr", "ThuyHe_MatNuocTinh_Lyr", "ThuyHe_KenhMuongA_Lyr"]
            '''
            arcpy.SimplifyLine_cartography(in_features = outPathMerge, out_feature_class = outPathSimplify, algorithm = _algorithm, tolerance = _tolerance, collapsed_point_option = _collapsed_point_option)
            ## MakeLayerFeature ##
            outPathSimplifyLayer = "in_memory\\outPathSimplifyTempLayer"
            arcpy.MakeFeatureLayer_management(outPathSimplify, outPathSimplifyLayer)
            ## Update Shape Feature Class ##
            arcpy.AddMessage("\n# Update Shape Feature Class:")
            for element in listPolyLine:
                if element["LayerType"] == "Polyline":
                    arcpy.AddMessage("\n\t# Update {0}...".format(element["LayerName"]))
                    ### MakeLayerFeature ###
                    layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                    arcpy.MakeFeatureLayer_management(layerPath, element["featureLayer"])
                    ### Select ###
                    strQuery = element["FID_XXX"] + " IS NOT NULL"
                    arcpy.SelectLayerByAttribute_management(outPathSimplifyLayer, "NEW_SELECTION", strQuery)
                    ### Copy To Table Temp ###
                    outTableTemp = "in_memory\\outTableTemp"
                    arcpy.CopyFeatures_management(outPathSimplifyLayer, outTableTemp)
                    ### Replace shapes; delete rows with no simplified match ###
                    with arcpy.da.UpdateCursor(element["featureLayer"], ["OID@", "SHAPE@"]) as cursor:
                        for row in cursor:
                            found = False
                            with arcpy.da.UpdateCursor(outTableTemp, [element["FID_XXX"], "SHAPE@"]) as cursorSub:
                                for rowSub in cursorSub:
                                    if row[0] == rowSub[0]:
                                        found = True
                                        row[1] = rowSub[1]
                                        cursor.updateRow(row)
                                        cursorSub.deleteRow()
                                        break
                            if found == False:
                                cursor.deleteRow()
            arcpy.AddMessage("\n# Hoan thanh Simplify Polyline!!!")
            ############################################## Snap Line to Polygon #############################
            # Snap each simplified line layer back onto the edge of its own
            # (independently simplified) buffer polygon.
            arcpy.AddMessage("\n# Bat dau Snap")
            for elementPolygon in listPolygon:
                if elementPolygon["LayerType"] == "Polyline":
                    # Recover the line layer name by stripping the "_Buffer" suffix.
                    lineLayerName = elementPolygon["LayerName"][:elementPolygon["LayerName"].find('_Buffer')]
                    if (lineLayerName <> "DuongBinhDo"):
                        arcpy.AddMessage("\n\t# Snap: {0}".format(lineLayerName))
                        layerBufferPath = duongDanNguon + "/" + elementPolygon["DatasetName"] + "/" + elementPolygon["LayerName"]
                        layerLinePath = duongDanNguon + "/" + elementPolygon["DatasetName"] + "/" + lineLayerName
                        # self.snap_distance: instance-configured tolerance — set
                        # outside this method.
                        arcpy.Snap_edit(layerLinePath, [[layerBufferPath, "EDGE", self.snap_distance]])
            ############## Snap Other
            # Extra repair around the PhuBeMat land-cover layer: snap to the hydro
            # snap layer, fill the gaps left by Erase with temp-tagged slivers,
            # and eliminate them back into neighbouring polygons.
            if _checkExitLayer:
                arcpy.AddMessage("\n\t# Snap other: {0}".format("PhuBeMat"))
                arcpy.Integrate_management([[duongDanNguon + "/PhuBeMat/PhuBeMat", 1]], "2 Meters")
                arcpy.Densify_edit(duongDanNguon + "/PhuBeMat/PhuBeMat", "DISTANCE","2 Meters",None ,None)
                arcpy.Snap_edit(duongDanNguon + "/PhuBeMat/PhuBeMat", [[duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM", "EDGE", "35 Meters"]])
                arcpy.Integrate_management([[duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM", 1],[duongDanNguon + "/PhuBeMat/PhuBeMat", 2]], "2 Meters")
                # Holes = full coverage minus the snapped coverage.
                arcpy.Erase_analysis(in_features = duongDanNguon + "/PhuBeMat/PhuBeMat_Full", erase_features = duongDanNguon + "/PhuBeMat/PhuBeMat", out_feature_class = duongDanNguon + "/PhuBeMat/PhuBeMat_Lo")
                # Tag hole polygons so they can be selected and eliminated below.
                arcpy.CalculateField_management(duongDanNguon + "/PhuBeMat/PhuBeMat_Lo", "maNhanDang", '"temp123"', "PYTHON_9.3")
                arcpy.Append_management([duongDanNguon + "/PhuBeMat/PhuBeMat_Lo"], duongDanNguon + "/PhuBeMat/PhuBeMat", "NO_TEST",None,None)
                arcpy.MultipartToSinglepart_management(duongDanNguon + "/PhuBeMat/PhuBeMat", duongDanNguon + "/PhuBeMat/PhuBeMat2")
                arcpy.MakeFeatureLayer_management(duongDanNguon + "/PhuBeMat/PhuBeMat2", "PhuBeMat_Temp_Lyr")
                arcpy.SelectLayerByAttribute_management("PhuBeMat_Temp_Lyr", "NEW_SELECTION", "maNhanDang = 'temp123'")
                # Merge tagged slivers into the neighbour with the longest shared edge.
                arcpy.Eliminate_management(in_features = "PhuBeMat_Temp_Lyr", out_feature_class = duongDanNguon + "/PhuBeMat/PhuBeMat3", selection = "LENGTH")
                arcpy.Densify_edit(duongDanNguon + "/ThuyHe/SongSuoiL", "DISTANCE","2 Meters",None ,None)
                arcpy.Snap_edit(duongDanNguon + "/ThuyHe/SongSuoiL", [[duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM", "EDGE", "2 Meters"]])
                arcpy.CopyFeatures_management(duongDanNguon + "/PhuBeMat/PhuBeMat3", duongDanNguon + "/PhuBeMat/PhuBeMat")
            ############################################## Copy to final #############################
            # Drop the temporary FID_* fields and copy results to the final GDB.
            # Helper layers (PhuBeMat_Full, SongSuoiL_KenhMuongL_SnapPBM) stay behind.
            for element in listPolygon:
                if element["LayerType"] == "Polygon":
                    if element["LayerName"] <> "PhuBeMat_Full":
                        layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                        layerFinalPath = duongDanDich + "/" + element["DatasetName"] + "/" + element["LayerName"]
                        arcpy.DeleteField_management(layerPath, [element["FID_XXX"]])
                        arcpy.CopyFeatures_management(layerPath, layerFinalPath)
            for element in listPolyLine:
                if element["LayerType"] == "Polyline":
                    if element["LayerName"] <> "SongSuoiL_KenhMuongL_SnapPBM":
                        layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                        layerFinalPath = duongDanDich + "/" + element["DatasetName"] + "/" + element["LayerName"]
                        arcpy.DeleteField_management(layerPath, [element["FID_XXX"]])
                        arcpy.CopyFeatures_management(layerPath, layerFinalPath)
            #arcpy.AddMessage("\n# Hoan thanh!!!")
        else:
            # Config file not found.
            arcpy.AddMessage("\n# Khong tim thay file cau hinh: \"{0}\"".format(urlFile))
    # NOTE(review): error.message is Python-2-only; these handlers report and
    # swallow the error rather than re-raising.
    except OSError as error:
        arcpy.AddMessage("Error" + error.message)
    except ValueError as error:
        arcpy.AddMessage("Error" + error.message)
    except arcpy.ExecuteError as error:
        arcpy.AddMessage("Error" + error.message)
    finally:
        # Always clear the scratch workspace, even on failure.
        arcpy.Delete_management("in_memory")