def simplify_polygon(inputFC):
    """Generalize *inputFC* in place with a 100 meter tolerance, logging duration.

    Note: Generalize_edit permanently modifies the input feature class.
    """
    arcpy.AddMessage('Start simplifying polygon...')
    start6 = timeit.default_timer()
    arcpy.Generalize_edit(inputFC, '100 Meter')
    end6 = timeit.default_timer()
    # Bug fix: AddMessage expects a string; the original passed a tuple,
    # which logged the tuple repr instead of a readable message.
    arcpy.AddMessage('End simplifying polygon. Duration: {0}'.format(
        round(end6 - start6, 4)))
def Coalesce(inFeats, dilDist, outFeats, scratchGDB="in_memory"):
    '''Buffer out then in (positive dilDist) to coalesce nearby features, or
    in then out (negative dilDist) to eliminate narrow portions/bridges.

    If a positive number is entered for the dilation distance, features are
    expanded outward by the specified distance, then shrunk back in by the
    same distance. This causes nearby features to coalesce. If a negative
    number is entered for the dilation distance, features are first shrunk,
    then expanded. This eliminates narrow portions of existing features,
    thereby simplifying them. It can also break narrow "bridges" between
    features that were formerly coalesced.

    Parameters:
        inFeats: input feature class or layer.
        dilDist: dilation distance -- a number or a linear-unit string
            (e.g. "100 Meters"), parsed via multiMeasure.
        outFeats: path of the output feature class (also returned).
        scratchGDB: workspace for intermediates; "in_memory" triggers cleanup.
    '''
    # If it's a string, parse dilation distance and get the negative.
    # Fix: isinstance is the idiomatic type check (`type(x) == str`
    # fails for str subclasses).
    if isinstance(dilDist, str):
        origDist, units, meas = multiMeasure(dilDist, 1)
        negDist, units, negMeas = multiMeasure(dilDist, -1)
    else:
        origDist = dilDist
        meas = dilDist
        negDist = -1 * origDist
        negMeas = negDist

    # Parameter check
    if origDist == 0:
        arcpy.AddError(
            "You need to enter a non-zero value for the dilation distance")
        raise arcpy.ExecuteError

    # Set parameters. Dissolve parameter depends on dilation distance:
    # dissolve on the first (outward) buffer for positive distances,
    # on the second (restoring) buffer for negative ones.
    if origDist > 0:
        dissolve1 = "ALL"
        dissolve2 = "NONE"
    else:
        dissolve1 = "NONE"
        dissolve2 = "ALL"

    # Process: Buffer
    Buff1 = scratchGDB + os.sep + "Buff1"
    arcpy.Buffer_analysis(inFeats, Buff1, meas, "FULL", "ROUND", dissolve1,
                          "", "PLANAR")

    # Process: Clean Features
    Clean_Buff1 = scratchGDB + os.sep + "CleanBuff1"
    CleanFeatures(Buff1, Clean_Buff1)

    # Process: Generalize Features
    # This should prevent random processing failures on features with many
    # vertices, and also speed processing in general
    arcpy.Generalize_edit(Clean_Buff1, "0.1 Meters")

    # Eliminate gaps
    # Added step due to weird behavior on some buffers
    Clean_Buff1_ng = scratchGDB + os.sep + "Clean_Buff1_ng"
    arcpy.EliminatePolygonPart_management(Clean_Buff1, Clean_Buff1_ng, "AREA",
                                          "900 SQUAREMETERS", "",
                                          "CONTAINED_ONLY")

    # Process: Buffer (back in the opposite direction)
    Buff2 = scratchGDB + os.sep + "NegativeBuffer"
    arcpy.Buffer_analysis(Clean_Buff1_ng, Buff2, negMeas, "FULL", "ROUND",
                          dissolve2, "", "PLANAR")

    # Process: Clean Features to get final dilated features
    CleanFeatures(Buff2, outFeats)

    # Cleanup
    if scratchGDB == "in_memory":
        garbagePickup([Buff1, Clean_Buff1, Buff2])

    return outFeats
def loop_species(file_dict_fun, group_list, out_path, out_gen): with open(file_dict_fun, 'rU') as inputFile: header = next(inputFile) for line in inputFile: filepath = line.split(",") filepath = filepath[0].strip('\n') print "Working on {0}".format(filepath) # loop over fcs for fc in fcs_in_workspace(filepath): # checks to make sure it is a CH files THIS IS THE HARD CODE CHcheck = fc[:2] if CHcheck == 'CH': entid = fc.split("_") entid = entid[1] # extract entid from filename then compare to the list of species in the current group to see if the # file should be moved to the current outpath if entid not in group_list: continue infc = filepath + os.sep + str(fc) outfc = out_path + os.sep + str(fc) outgen = out_gen + os.sep + str(fc) # moves CH files in found in the current group to the outGDB for that group in the spatial library # found at the outfolder location if not arcpy.Exists(outfc): print "Moving species {0}".format(entid) arcpy.CopyFeatures_management(infc, outfc) arcpy.CopyFeatures_management(infc, outgen) try: arcpy.Generalize_edit(outgen) print "Exported file {0}".format(outfc) except: pass else: print "Previously exported {0}".format(entid) else: continue del header
def execute(in_feature, out_feature, contour_level=None): arcpy.env.overwriteOutput = True arcpy.env.workspace = "in_memory" workspace = "in_memory" # Maintain a list so we can easily merge them back fn_list = [] temp_file = [] cntr = os.path.basename(in_feature) if contour_level is None: levels = range(15, 50, 5) else: levels = contour_level for value in levels: try: out1 = arcpy.CreateUniqueName( arcpy.ValidateTableName(cntr.replace(".shp", "_%d" % value)), workspace) arcpy.Select_analysis(in_feature, out1, where_clause="CONTOUR=%d" % value) print "Select into %s where contour=%d" % (out1, value) temp_file.append(out1) out2_0 = arcpy.CreateUniqueName(out1, workspace) arcpy.FeatureToPolygon_management(out1, out2_0) out2_1 = arcpy.CreateUniqueName(out2_0, workspace) arcpy.Union_analysis([out2_0], out2_1, join_attributes="ONLY_FID", gaps="NO_GAPS") out2 = arcpy.CreateUniqueName(out2_1, workspace) arcpy.Dissolve_management(out2_1, out2, multi_part="SINGLE_PART") temp_file.append(out2_0) temp_file.append(out2_1) temp_file.append(out2) out3 = arcpy.CreateUniqueName(out2, workspace) # Remove some points arcpy.Generalize_edit(out2, "200 Meters") # Then do a smooth out3 = arcpy.CreateUniqueName(out2, workspace) arcpy.SmoothPolygon_cartography(out2, out3, "PAEK", "7000 Meters", "NO_FIXED") print "Copy and smooth %s -> %s" % (out2, out3) calc_field(out3, { "AREA1": "!shape.area!", "dbZ": "%d" % value }, True) temp_file.append(out3) out4 = arcpy.CreateUniqueName(out3, workspace) arcpy.Select_analysis(out3, out4, where_clause="AREA1>30000000") temp_file.append(out4) fn_list.append(out4) except Exception, ex: print ex.message continue
del rows

# delete the temp/scratch layer
arcpy.Delete_management(countySourceTEMP, "")

# Strip any curves out of the data now in our schema.
current_step += 1
arcpy.AddMessage("[step %s of %s] Removing curves, if any..."
                 % (current_step, total_steps))
arcpy.Densify_edit(outputFeatureClass, "ANGLE", "", "", "")

# Ensure that vertices are not too close together, which causes errors for the
# roads and highways system (it rejects vertices within 1 meter). This tool
# also removes bezier curves and arc segments, converting them to straight
# lines, so the Densify call above may be redundant -- keeping it for now.
current_step += 1
arcpy.AddMessage("[step %s of %s] Generalizing the line features..."
                 % (current_step, total_steps))
arcpy.Generalize_edit(outputFeatureClass, "2 Meters")

# Remove any segments that fall outside of the county.
current_step += 1
arcpy.AddMessage("[step %s of %s] Begin removing segments that are outside of the county..."
                 % (current_step, total_steps))

# Counties whose names need a space re-inserted for the attribute query.
queryCountyName = {
    "BoxElder": "Box Elder",
    "SaltLake": "Salt Lake",
    "SanJuan": "San Juan",
}.get(countyName, countyName)
queryString = "NAME = '" + queryCountyName + "'"

current_step += 1
def get_Footprint(inputRaster):
    """Derive a simplified footprint polygon (WGS 1984) for *inputRaster*.

    Pipeline: resample -> binary raster (values >= 10) -> raster-to-polygon ->
    take the largest polygon -> rebuild a polygon from its exterior ring only
    -> generalize -> project to WGS 1984.

    Returns the footprint as an arcpy geometry projected to WGS 1984.
    Raises: re-raises any geoprocessing failure after logging arcpy messages.
    """
    try:
        ws = arcpy.env.scratchFolder
        arcpy.env.workspace = ws
        srWGS84 = arcpy.SpatialReference('WGS 1984')
        tmpGDB = os.path.join(ws, r"temp.gdb")
        if not os.path.exists(tmpGDB):
            arcpy.CreateFileGDB_management(ws, r"temp.gdb")

        # Calculate Footprint geometry
        resampleRaster = os.path.join(ws, 'resampleRaster' + '.tif')
        bin_Raster = os.path.join(ws, 'bin_Raster' + '.tif')
        polygon_with_holes = os.path.join(tmpGDB, 'polygon_with_holes')
        out_Vertices = os.path.join(tmpGDB, 'Out_Vertices')

        arcpy.AddMessage('Start resampling the input raster...')
        start1 = timeit.default_timer()
        # NOTE(review): rasterProp is never used afterwards -- kept in case
        # the call is relied on for validation side effects; confirm.
        rasterProp = arcpy.GetRasterProperties_management(
            inputRaster, "CELLSIZEX")
        resampleRaster = arcpy.Resample_management(inputRaster, resampleRaster,
                                                   4, "NEAREST")
        inputSR = arcpy.Describe(resampleRaster).spatialReference
        end1 = timeit.default_timer()
        # Bug fix: AddMessage expects a string; the original passed tuples,
        # which logged the tuple repr. Same fix applied to all timing messages.
        arcpy.AddMessage('End resampling the input raster. Duration: {0}'.format(
            round(end1 - start1, 4)))

        arcpy.AddMessage('Start creating binary raster (Raster Calculator)...')
        start2 = timeit.default_timer()
        expression = 'Con(' + '"' + 'resampleRaster' + '.tif' + '"' + ' >= 10 , 1)'
        bin_Raster = arcpy.gp.RasterCalculator_sa(expression, bin_Raster)
        end2 = timeit.default_timer()
        arcpy.AddMessage('End creating binary raster. Duration: {0}'.format(
            round(end2 - start2, 4)))

        # Convert binary raster to polygon
        arcpy.AddMessage('Start creating prime polygon from raster...')
        start3 = timeit.default_timer()
        polygon_with_holes = arcpy.RasterToPolygon_conversion(
            in_raster=bin_Raster,
            out_polygon_features=polygon_with_holes,
            simplify="SIMPLIFY",
            raster_field="Value",
            create_multipart_features="SINGLE_OUTER_PART",
            max_vertices_per_feature="")
        end3 = timeit.default_timer()
        arcpy.AddMessage('End creating polygon. Duration: {0}'.format(
            round(end3 - start3, 4)))

        # Extract the main polygon (maximum area), which includes several donuts
        arcpy.AddMessage(
            'Start extracting exterior ring (outer outline) of polygon...')
        start4 = timeit.default_timer()
        sql_clause = (None, 'ORDER BY Shape_Area DESC')
        # Fix: use the cursor as a context manager instead of leaking it.
        with arcpy.da.SearchCursor(polygon_with_holes, ('SHAPE@'), None, None,
                                   False, sql_clause) as cursor:
            geom = next(cursor)[0]
        end4 = timeit.default_timer()
        arcpy.AddMessage('End extracting polygon. Duration: {0}'.format(
            round(end4 - start4, 4)))

        # Extract the exterior points from the main polygon to generate a pure
        # polygon from the outer line of the main polygon (dropping holes).
        arcpy.AddMessage('Start extracting exterior points ...')
        start5 = timeit.default_timer()
        outer_coords = []
        for island in geom.getPart():
            for point in island:
                if not isinstance(point, type(None)):
                    newPoint = (point.X, point.Y)
                    if len(outer_coords) == 0:
                        outer_coords.append(newPoint)
                    elif not newPoint == outer_coords[0]:
                        outer_coords.append((newPoint))
                    elif len(outer_coords) > 50:
                        # Ring closed back onto the first vertex: stop here.
                        # (The >50 guard presumably skips tiny degenerate
                        # rings -- TODO confirm intent.)
                        outer_coords.append((newPoint))
                        break

        # Create footprint featureclass --> polygon
        footprint_FC = arcpy.CreateFeatureclass_management(
            tmpGDB, "footprint_FC", "POLYGON", "", "DISABLED", "DISABLED",
            inputSR)
        with arcpy.da.InsertCursor(footprint_FC, ['SHAPE@']) as cursor:
            cursor.insertRow([
                arcpy.Polygon(
                    arcpy.Array([arcpy.Point(*coords)
                                 for coords in outer_coords]),
                    inputSR)
            ])
        end5 = timeit.default_timer()
        arcpy.AddMessage(
            'End extracting exterior points and inserted as FC. '
            'Duration: {0}'.format(round(end5 - start5, 4)))

        arcpy.AddMessage('Start simplifying footprint polygon...')
        start6 = timeit.default_timer()
        arcpy.Generalize_edit(footprint_FC, '100 Meter')
        with arcpy.da.SearchCursor(footprint_FC, ('SHAPE@')) as cursor:
            finalGeometry = next(cursor)[0]
        end6 = timeit.default_timer()
        arcpy.AddMessage('End simplifying footprint polygon. Duration: {0}'.format(
            round(end6 - start6, 4)))

        footprint_WGS84 = finalGeometry.projectAs(srWGS84)
        return (footprint_WGS84)
    except:
        msgs = "ArcPy ERRORS:\n %s\n" % arcpy.GetMessages(2)
        arcpy.AddError(msgs)
        raise
# NOTE(review): script fragment; shp_file is defined earlier in the file.
print shp_file
if os.path.exists(shp_file):
    print "ok"
else:
    print "file is not exist"
    exit(1)
##################################################################
# Add a LengthKM field and populate it with the geodesic length in kilometers.
arcpy.AddField_management(shp_file, "LengthKM", "DOUBLE", "", "", "", "",
                          "NULLABLE", "NON_REQUIRED", "")
arcpy.CalculateField_management(shp_file, "LengthKM",
                                "!shape.geodesicLength@KILOMETERS!",
                                "PYTHON_9.3")
##################################################################
# Generalize edits shp_file in place with a 1 cm tolerance.
arcpy.Generalize_edit(in_features=shp_file, tolerance="1 Centimeters")
print "1"
# arcpy.Generalize_edit(in_features=shp_file, tolerance="1 Centimeters")
# Replace a layer/table view name with a path to a dataset (which can be a layer file) or create the layer/table view within the script
# The following inputs are layers or table views: "1"
# Densify returns an arcpy Result object.
retval = arcpy.Densify_edit(in_features=shp_file,
                            densification_method="DISTANCE",
                            distance="100 Meters",
                            max_deviation=".1 Meters",
                            max_angle="10")
print "2"
print retval.status
# NOTE(review): arcpy Result objects have no save() method -- this line
# presumably raises AttributeError; looks like a debug leftover, confirm.
retval.save("123.shp")
print type(retval.getOutput(0))
print type(retval)
# NOTE(review): fragment of a numbered action chain; out_path, out_file_6,
# shapefile_in, in_path and out_file_name_6 come from earlier steps.
shapefile_out = out_path + out_file_6
arcpy.CopyFeatures_management(shapefile_in, shapefile_out, "", "0", "0", "0")
# Action 07:------------------------------------------------------------------->
# Process: Generalize (In order to reduce vertices/elements)
#Tolerance 2.5m this reduces the vertices of concave polygon edges significantly
#BuffIn makes one vertex into many along a smooth line and this brings it back to 1 vertex
print("Process: Generalize_edit")
in_file_name_7 = out_file_name_6
in_file_7 = in_file_name_7 + ".shp"
# NOTE(review): paths built by plain concatenation -- assumes in_path/out_path
# carry a trailing separator; confirm upstream.
shapefile_in = in_path + in_file_7
# Process: Generalize (edits the shapefile in place; no new output is written)
arcpy.Generalize_edit(shapefile_in, "2.5 Meters")
# Action 08:------------------------------------------------------------------->
# Process: Feature Vertices To Points
print("Process: FeatureVerticesToPoints_management")
out_path = in_path
in_file_8 = in_file_name_7 + ".shp"  #07 did not have output, it edited out_file_name_6
out_file_name_8 = in_file_name_7 + "_VertPt"
out_file_8 = out_file_name_8 + ".shp"
shapefile_in = in_path + in_file_8
shapefile_out = out_path + out_file_8
# Process: Feature Vertices To Points
arcpy.FeatureVerticesToPoints_management(shapefile_in, shapefile_out, "ALL")
def ShrinkWrap(inFeats, dilDist, outFeats, smthMulti=8, scratchGDB="in_memory"):
    """Generate a smoothed 'shrink-wrap' outline around inFeats into outFeats.

    Cleans and dissolves the inputs, buffers by dilDist, explodes the result
    to singleparts, then for each part selects the overlapping dissolved
    features and Coalesces them with an enlarged (smthMulti x dilDist)
    distance before appending the gap-free result to outFeats.

    Parameters:
        inFeats: input features.
        dilDist: dilation distance (number or linear-unit string); must be > 0.
        outFeats: path of the output feature class (created here; returned).
        smthMulti: multiplier applied to dilDist to get the smoothing distance.
        scratchGDB: workspace for intermediates; "in_memory" triggers cleanup.

    Raises arcpy.ExecuteError when dilDist is not positive.
    """
    # Parse dilation distance, and increase it to get smoothing distance
    smthMulti = float(smthMulti)
    origDist, units, meas = multiMeasure(dilDist, 1)
    smthDist, units, smthMeas = multiMeasure(dilDist, smthMulti)

    # Parameter check
    if origDist <= 0:
        arcpy.AddError(
            "You need to enter a positive, non-zero value for the dilation distance"
        )
        raise arcpy.ExecuteError

    #tmpWorkspace = arcpy.env.scratchGDB
    #arcpy.AddMessage("Additional critical temporary products will be stored here: %s" % tmpWorkspace)

    # Set up empty trashList for later garbage collection
    trashList = []

    # Declare path/name of output data and workspace
    drive, path = os.path.splitdrive(outFeats)
    path, filename = os.path.split(path)
    myWorkspace = drive + path
    Output_fname = filename

    # Process: Create Feature Class (to store output)
    arcpy.CreateFeatureclass_management(myWorkspace, Output_fname, "POLYGON",
                                        "", "", "", inFeats)

    # Process: Clean Features
    #cleanFeats = tmpWorkspace + os.sep + "cleanFeats"
    cleanFeats = scratchGDB + os.sep + "cleanFeats"
    CleanFeatures(inFeats, cleanFeats)
    trashList.append(cleanFeats)

    # Process: Dissolve Features
    #dissFeats = tmpWorkspace + os.sep + "dissFeats"
    # Writing to disk in hopes of stopping geoprocessing failure
    #arcpy.AddMessage("This feature class is stored here: %s" % dissFeats)
    dissFeats = scratchGDB + os.sep + "dissFeats"
    arcpy.Dissolve_management(cleanFeats, dissFeats, "", "", "SINGLE_PART", "")
    trashList.append(dissFeats)

    # Process: Generalize Features
    # This should prevent random processing failures on features with many
    # vertices, and also speed processing in general
    arcpy.Generalize_edit(dissFeats, "0.1 Meters")

    # Process: Buffer Features
    #arcpy.AddMessage("Buffering features...")
    #buffFeats = tmpWorkspace + os.sep + "buffFeats"
    buffFeats = scratchGDB + os.sep + "buffFeats"
    arcpy.Buffer_analysis(dissFeats, buffFeats, meas, "", "", "ALL")
    trashList.append(buffFeats)

    # Process: Explode Multiparts
    #explFeats = tmpWorkspace + os.sep + "explFeats"
    # Writing to disk in hopes of stopping geoprocessing failure
    #arcpy.AddMessage("This feature class is stored here: %s" % explFeats)
    explFeats = scratchGDB + os.sep + "explFeats"
    arcpy.MultipartToSinglepart_management(buffFeats, explFeats)
    trashList.append(explFeats)

    # Process: Get Count
    numWraps = (arcpy.GetCount_management(explFeats)).getOutput(0)
    arcpy.AddMessage(
        'Shrinkwrapping: There are %s features after consolidation' % numWraps)

    # Loop through the exploded buffer features
    counter = 1
    with arcpy.da.SearchCursor(explFeats, ["SHAPE@"]) as myFeats:
        for Feat in myFeats:
            arcpy.AddMessage('Working on shrink feature %s' % str(counter))
            featSHP = Feat[0]
            tmpFeat = scratchGDB + os.sep + "tmpFeat"
            arcpy.CopyFeatures_management(featSHP, tmpFeat)
            trashList.append(tmpFeat)

            # Process: Repair Geometry
            arcpy.RepairGeometry_management(tmpFeat, "DELETE_NULL")

            # Process: Make Feature Layer
            arcpy.MakeFeatureLayer_management(dissFeats, "dissFeatsLyr", "",
                                              "", "")
            trashList.append("dissFeatsLyr")

            # Process: Select Layer by Location (Get dissolved features within
            # each exploded buffer feature)
            arcpy.SelectLayerByLocation_management("dissFeatsLyr", "INTERSECT",
                                                   tmpFeat, "", "NEW_SELECTION")

            # Process: Coalesce features (expand)
            coalFeats = scratchGDB + os.sep + 'coalFeats'
            Coalesce("dissFeatsLyr", smthMeas, coalFeats, scratchGDB)
            # Increasing the dilation distance improves smoothing and reduces
            # the "dumbbell" effect.
            trashList.append(coalFeats)

            # Eliminate gaps
            noGapFeats = scratchGDB + os.sep + "noGapFeats"
            arcpy.EliminatePolygonPart_management(coalFeats, noGapFeats,
                                                  "PERCENT", "", 99,
                                                  "CONTAINED_ONLY")

            # Process: Append the final geometry to the ShrinkWrap feature class
            arcpy.AddMessage("Appending feature...")
            arcpy.Append_management(noGapFeats, outFeats, "NO_TEST", "", "")

            counter += 1
    del Feat

    # Cleanup
    if scratchGDB == "in_memory":
        garbagePickup(trashList)

    return outFeats
# NOTE(review): script fragment; Hafkaa_input, Parcel_input and interval are
# presumably tool parameters defined earlier in the file -- confirm.
arcpy.env.workspace = arcpy.GetParameterAsText(3)
arcpy.env.overwriteOutput = True

# Names of in-workspace intermediate datasets/layers.
Simun_line = 'Simun_line'
# NOTE(review): alias of Simun_line -- Generalize edits in place, so the
# "generalized" name refers to the same dataset as the original.
Simun_line_generalize = Simun_line
Line_vertices = 'Line_vertices'
Line_vertices_Layer = 'Line_vertices_Layer'
Selected_simun_points = 'Selected_simun_points'
Simon_Points_Layer = 'Simon_Points_Layer'

# Process: Feature To Line (merge both inputs into a single line dataset)
in_feature = Hafkaa_input + ';' + Parcel_input
arcpy.FeatureToLine_management(in_feature, Simun_line, "", "NO_ATTRIBUTES")

# Process: Generalize (in place, 1 mm tolerance)
arcpy.Generalize_edit(Simun_line, "0.001 Meters")

# Process: Densify (insert vertices every `interval` meters)
arcpy.Densify_edit(Simun_line_generalize, "DISTANCE", interval + " Meters",
                   "", "")

# Process: Feature Vertices To Points
arcpy.FeatureVerticesToPoints_management(Simun_line_generalize, Line_vertices,
                                         "ALL")

# Process: Make Feature Layer
arcpy.MakeFeatureLayer_management(
    Line_vertices, Line_vertices_Layer, "", "",
    "OBJECTID OBJECTID VISIBLE NONE;Shape Shape VISIBLE NONE;ORIG_FID ORIG_FID VISIBLE NONE"
)
in_location = r'L:\Workspace\ESA_Species\Step3\ToolDevelopment\SpatialLibrary\Range' out_location = r'L:\Workspace\ESA_Species\Step3\ToolDevelopment\SpatialLibrary\Generalized_files\Range' list_gdb = os.listdir(in_location) for v in list_gdb: if len(v.split('.')) == 2: pass else: list_gdb.remove(v) for i in list_gdb: print i in_gdb = in_location + os.sep + i out_gdb = out_location + os.sep + i if not os.path.exists(out_gdb): arcpy.CreateFileGDB_management(out_location, i, "CURRENT") arcpy.env.workspace = in_gdb list_fc = arcpy.ListFeatureClasses() for fc in list_fc: print ' Working on {0} of {1}...'.format((list_fc.index(fc)) + 1, len(list_fc)) inFeatures = in_gdb + os.sep + fc copFeatures = out_gdb + os.sep + fc if not arcpy.Exists(copFeatures): # Since Generalize permanently updates the input, first make a copy of the original FC arcpy.CopyFeatures_management(inFeatures, copFeatures) arcpy.Generalize_edit(copFeatures)
import datetime inlocation = r'L:\Workspace\ESA_Species\Step3\ToolDevelopment\TerrestrialGIS\CriticalHabitat\ShapeWebApp_CH\WebMercator' # Set the workspace arcpy.env.workspace = inlocation fc_list = arcpy.ListFeatureClasses() start_time = datetime.datetime.now() print "Start Time: " + start_time.ctime() for fc in fc_list: try: start_loop = datetime.datetime.now() # Since Generalize permanently updates the input, first make a copy of the original FC # arcpy.CopyFeatures_management (inFeatures, copFeatures) # Use the Generalize tool to simplify the Buffer input to shorten Buffer processing time arcpy.Generalize_edit(inlocation + os.sep + fc) print 'Completed Generalization for {0} in {1}'.format( fc, (datetime.datetime.now() - start_loop)) except Exception as err: print '\nFailed on file: {0}'.format(fc) print(err) end = datetime.datetime.now() print "End Time: " + end.ctime() elapsed = end - start_time print "Elapsed Time: " + str(elapsed)
# NOTE(review): this chunk is the tail of a try/except whose `try:` opens
# above the visible window; the indentation below assumes that enclosing try.
    # Clear any leftover intermediates from a previous run.
    deleteFeature(polylineLayer, f)
    deleteFeature(polygonLayerMid2, f)
    print 'min', datetime.datetime.now()
    arcpy.MinimumBoundingGeometry_management(flayer, polylineLayer,
                                             "CONVEX_HULL")
    print 'buffer', datetime.datetime.now()
    arcpy.Buffer_analysis(polylineLayer, polygonLayerMid2, "5000 METERS",
                          method="GEODESIC")
    print 'dissolve', datetime.datetime.now()
    arcpy.Dissolve_management(polygonLayerMid2, polygonFinalLayer,
                              multi_part="SINGLE_PART")
    print 'general', datetime.datetime.now()
    # Simplify the dissolved result in place.
    arcpy.Generalize_edit(polygonFinalLayer, "5000 METERS")
    # Drop the intermediate layers again.
    deleteFeature(polylineLayer, f)
    deleteFeature(polygonLayerMid2, f)
    # Audible completion signal, then report elapsed CPU time.
    winsound.Beep(500, 1000)
    t_end = time.clock()
    print 'Processing cost {} seconds'.format(t_end - t_start)
except:
    # Log the traceback to stdout and to the log file handle f.
    print traceback.format_exc()
    print >> f, traceback.format_exc()
# NOTE(review): fragment of a map-unit (MU) generalization workflow;
# mupolygon, sapolygon, query and min_width are defined earlier in the file.
arcpy.MakeFeatureLayer_management('MU_lines_dis', 'MU_lines_select')
arcpy.Delete_management('MU_lines_dis')
arcpy.Delete_management("MU_lines")
if query:
    arcpy.MakeFeatureLayer_management(mupolygon, 'MU_select', query)
    # Keep MU lines that border the queried polygons, minus those that also
    # follow the survey-area (sapolygon) boundary.
    arcpy.SelectLayerByLocation_management("MU_lines_select",
                                           "SHARE_A_LINE_SEGMENT_WITH",
                                           'MU_select', "#", "NEW_SELECTION")
    arcpy.SelectLayerByLocation_management("MU_lines_select",
                                           "SHARE_A_LINE_SEGMENT_WITH",
                                           sapolygon, '#',
                                           "REMOVE_FROM_SELECTION")
else:
    # No query: keep every MU line NOT on the survey-area boundary.
    arcpy.SelectLayerByLocation_management("MU_lines_select",
                                           "SHARE_A_LINE_SEGMENT_WITH",
                                           sapolygon, '#', "NEW_SELECTION",
                                           "INVERT")
# Smooth then generalize the selected interior lines.
arcpy.SmoothLine_cartography("MU_lines_select", "MU_lines_gen", "PAEK",
                             "25 Meters", "FIXED_CLOSED_ENDPOINT", "NO_CHECK")
arcpy.Generalize_edit("MU_lines_gen", "2 Meters")
# Merge the untouched (boundary) lines back with the smoothed ones and
# rebuild polygons, carrying attributes from MU_point.
arcpy.SelectLayerByAttribute_management("MU_lines_select", "SWITCH_SELECTION")
arcpy.Merge_management("MU_lines_select;MU_lines_gen", "MU_lines_gen_merge")
arcpy.FeatureToPolygon_management("MU_lines_gen_merge", "MU_gen",
                                  "", "ATTRIBUTES", "MU_point")
arcpy.Delete_management("MU_lines_gen_merge")
arcpy.Delete_management("MU_point")
arcpy.Delete_management("MU_lines_gen")
if query:
    arcpy.MakeFeatureLayer_management('MU_gen', 'MU_gen_select', query)
# NOTE(review): when query is falsy, 'MU_gen_select' is never created yet the
# Buffer below uses it -- looks like it would fail in that path; confirm.
#Step 1
#Expect to see several warnings of features dissappearing
arcpy.Buffer_analysis("MU_gen_select", "MU_negbuff19",
                      "-" + str(min_width) + " Meters")
#Step 2
# NOTE(review): the opening of this first config dict (presumably FILE_CONFIG,
# given its use below) is above the visible window; only its tail is shown.
    'GEODATABASE' : 'REPORTS.mdb',
    'SMLINES' : 'Sm_Lines'
}
# Relative names inside the geodatabase; values starting with '\' are
# concatenated directly onto the .mdb path below.
GEODATABASE_GEODATABASE_CONFIG = {
    'StartEN' : '\StartEN',
    'EndEN' : '\EndEN',
    'StartEnd' : '\OSR_POINTS',
    'SsReport' : '\SMARTscan_report',
    'smPointLyName' : 'Sm_PointsXY',
    'smLineLyName' : '\smLineLyName',
    'SMLINES_COPY' : 'Test',
    'SMLINES' : '\Sm_Lines',
    'Copy':'\Test'
}
# NOTE(review): mixed escaping -- '\G' is not a recognized escape sequence so
# Python keeps the backslash literally, but a raw string would be safer.
arcpy.env.workspace = "C:\\1-CarnellDeployment\GeneralTest"
gdbDatabasePath = os.path.join(arcpy.env.workspace,
                               FILE_CONFIG['GEODATABASE'])

#Set local parameters
# NOTE(review): os.path.join receives one pre-concatenated argument here, so
# it only normalizes -- the '+' performs the actual joining.
inFeatureClass = os.path.join(gdbDatabasePath + GEODATABASE_GEODATABASE_CONFIG['SMLINES']).replace('\\','/')
copFeatureClass = os.path.join(gdbDatabasePath + GEODATABASE_GEODATABASE_CONFIG['Copy']).replace('\\','/')
gTolerance = "2 Feet"

#Since Generalize permanently updates the input, first make a copy of the original FC
arcpy.CopyFeatures_management (inFeatureClass, copFeatureClass)

#Use the Generalize tool to simplify the Buffer input to shorten Buffer processing time
arcpy.Generalize_edit(copFeatureClass, gTolerance)