def Fix_curves(fc,tazar_border,curves):
    """Union curve slivers back into the adjacent parcel, then replace *fc*.

    fc           -- parcel feature class to repair (replaced in place)
    tazar_border -- border polygons used to trim the curves
    curves       -- curve polygons to be merged into their neighbours

    Relies on module-level helpers print_arcpy_message / Delete_polygons.
    """
    print_arcpy_message("START Func: Fix curves",1)
    name = fc
    gdb = os.path.dirname(fc)
    # Scratch outputs live next to the input feature class.
    curves_cut = gdb + '\\' + 'curves_cut'
    fc2 = gdb + '\\' + 'temp'
    # curves minus the tazar border, then fc minus those curve slivers.
    Delete_polygons(curves,tazar_border,curves_cut)
    Delete_polygons(fc,curves_cut,fc2)
    arcpy.MakeFeatureLayer_management(fc2,'ARCEL_ALL_FINAL_lyr')
    # Pair each curve sliver with the OBJECTID of one adjacent parcel
    # (first edge-sharing parcel that has a PARCEL_ID).
    list_Upd = []
    cursor = arcpy.SearchCursor(curves_cut)
    for i in cursor:
        arcpy.SelectLayerByLocation_management('ARCEL_ALL_FINAL_lyr',"SHARE_A_LINE_SEGMENT_WITH",i.shape)
        layer_ID = [row.OBJECTID for row in arcpy.SearchCursor('ARCEL_ALL_FINAL_lyr',['OBJECTID','PARCEL_ID']) if row.PARCEL_ID is not None]
        if layer_ID:
            list_Upd.append([layer_ID[0],i.shape])
    # Union each sliver into its paired parcel geometry.
    # NOTE(review): a fresh full-scan UpdateCursor per sliver is O(n*m),
    # and `id` shadows the builtin — candidates for a later cleanup.
    for i in list_Upd:
        upd_cursor = arcpy.UpdateCursor(fc2)
        for up_row in upd_cursor:
            geom = up_row.shape
            id = up_row.OBJECTID
            if str(id) == str(i[0]):
                new_geom = geom.union (i[1])
                up_row.shape = new_geom
                upd_cursor.updateRow(up_row)
    # Swap the repaired copy into place under the original name.
    arcpy.Delete_management (fc)
    arcpy.Delete_management (curves_cut)
    arcpy.Rename_management (fc2, name)
def attributeESApoints(cntry, outFeatures, outDB, ntlFile, lcRaster, ghslValu, globalOutlines):
    """Build a country point layer of Globcover cells attributed with NTL and GHSL values.

    cntry          -- ISO3 country code used to select the admin outline
    outFeatures    -- output point feature class (name is expected to contain "Globcover")
    outDB          -- workspace for intermediate datasets
    ntlFile        -- nighttime-lights raster sampled onto the points
    lcRaster       -- ESA Globcover land-cover raster
    ghslValu       -- GHSL raster sampled onto the points
    globalOutlines -- global admin-outline feature class with an "ISO3" field
    """
    # Intermediate dataset paths inside the working geodatabase.
    tempLC = "%s/%s" % (outDB,"ESA2009")
    tempPts = "%s/%s" % (outDB,"tPts")
    tempAdmin = "%s/%s" % (outDB, "Admin1")
    # Pull the single country outline.
    arcpy.Select_analysis(in_features=globalOutlines, out_feature_class=tempAdmin, where_clause=""""ISO3" = '%s'""" % cntry)
    tPrint("***Created Admin File")
    # Clip the land-cover raster to the country boundary geometry.
    arcpy.Clip_management(in_raster=lcRaster, rectangle="", out_raster=tempLC, in_template_dataset=tempAdmin, nodata_value="0", clipping_geometry="ClippingGeometry")
    tPrint("***Clipped ESA Globcover")
    # One point per land-cover cell, keeping the Globcover class code.
    arcpy.RasterToPoint_conversion(in_raster=tempLC, out_point_features=tempPts, raster_field="Value")
    renameField(tempPts, "grid_code", "Globcover")
    tPrint("***Converted to points")
    # Sample nighttime lights onto the points.
    ExtractValuesToPoints(tempPts, ntlFile, outFeatures, "NONE", "VALUE_ONLY")
    renameField(outFeatures, "RASTERVALU", "NTL")
    tPrint("***Extracted NTL Values")
    # Sample GHSL into a temporary copy, then swap it back in under the
    # original output name.
    fOut = outFeatures.replace("Globcover", "Globcover_GHSL")
    arcpy.sa.ExtractValuesToPoints(outFeatures, ghslValu, fOut)
    renameField(fOut, "RASTERVALU", "GHSL")
    arcpy.Delete_management(outFeatures)
    arcpy.Rename_management(fOut, outFeatures)
"MX_Distritos_Riego_2015_2016", "MX_Distritos_Riego_2016_2017", "CO_Irrigation_Organizations", "NM_Irrigation_Districts", "TX_Water_Districts" ] sortedList = sorted(fcList) for fc in sortedList: arcpy.FeatureClassToGeodatabase_conversion(fc, out_gdb) feature = fc.split(".shp")[0] fc_gdb = os.path.join(out_gdb, feature) if feature.startswith("NM"): gdbList.append(fc_gdb) else: index = fcList.index(fc) out_featureclass = os.path.join(out_gdb, outNameList[index]) arcpy.Rename_management(fc_gdb, out_featureclass, data_type) gdbList.append(out_featureclass) print "Step 2 completed at", datetime.datetime.now().strftime( "%A, %B %d %Y %I:%M:%S%p") ## --------------------------------------------------------------------------- ## 3. Edit attribute table ## Description: Rename and delete fields print "\nStep 3 Add and delete fields starts at", datetime.datetime.now( ).strftime("%A, %B %d %Y %I:%M:%S%p") fieldsDel = [ "WD", "DITCH_STR", "created_us", "created_da", "last_edite", "last_edi_1", "DATE_ESTD", "Shape__Are", "Shape__Len", "SHAPE_Leng", "GlobalID",
__author__ = 'WebbL' import arcpy from arcpy import env import os folder = r'\\uksapp03\Proj\Drainage\Data\Area04\Inventory_Data\Stage02\HADDMS' #\\uksapp03\proj\Drainage\Data\Area04\Inventory_Data\Stage02\ADC Stage01_ADC.gdb #\\uksapp03\proj\Drainage\Data\Area04\Inventory_Data\Stage02\HADDMS Stage01_HADDMS.gdb #folder = r'L:\Temp\LASTest' env.workspace = folder databases = arcpy.ListWorkspaces('*', 'FileGDB' ) for database in databases: head, tail = os.path.split(database) oldName = tail #newDBName = tail[:-4] + "_HADDMS_Input.gdb" newDBName = oldName.replace('Stage01_HADDMS', '_Stage01_HADDMS') print newDBName arcpy.Rename_management(oldName, newDBName, "FileGDB")
addMsgAndPrint(' ' + str(numberOfRows(inPts)) + ' rows of input features') if numberOfRows(inPts) > 5: addMsgAndPrint('Too many input features. Select fewer!') forceExit() ## if necessary, project inPts to dem srf demSR = arcpy.Describe(dem).spatialReference inPtsSR = arcpy.Describe(inPts).spatialReference if demSR <> inPtsSR: addMsgAndPrint('Projecting structure points from WKID ' + str(inPtsSR.factoryCode) + ' to WKID ' + str(demSR.factoryCode)) # rename inPts inPts2 = inPts + '_2' testAndDelete(inPts2) arcpy.Rename_management(inPts, inPts2) # project inPts2 arcpy.Project_management(inPts2, inPts, demSR) testAndDelete(inPts2) ## Add XYZ values addMsgAndPrint('Getting XYZ values') arcpy.AddXY_management(inPts) arcpy.AddSurfaceInformation_3d(inPts, dem, 'Z') ## for each orientation point # extract xyz, azi, inc # validate azi and inc values with arcpy.da.SearchCursor( inPts, ['OID@', 'POINT_X', 'POINT_Y', 'Z', aziField, incField]) as cursor:
## 4. Shapefile To Table ## Description: Convert shapefile to dbf files for MX. print "\nStep 4 Shapefile To Table starts at", datetime.datetime.now( ).strftime("%A, %B %d %Y %I:%M:%S%p") yearList = ["90", "00", "10"] fcList = ["ingmun90gw.dbf", "ingmun00gw.dbf", "ingmun10gw.dbf"] for year in yearList: for fc in fcList: in_fc = "MX\\" + fc if fc.startswith("ingmun" + year): arcpy.TableToDBASE_conversion(in_fc, interFolder) in_data = os.path.join(interFolder, fc) out_data = os.path.join(interFolder, "IncomeMX_" + year + ".dbf") arcpy.Rename_management(in_data, out_data) arcpy.AddField_management(out_data, "ISO_GEOID", "TEXT", "", "", "8") if out_data.endswith("90.dbf"): arcpy.CalculateField_management(out_data, "ISO_GEOID", "'484' + !CLAVE!", "PYTHON_9.3") elif out_data.endswith("00.dbf"): arcpy.CalculateField_management(out_data, "ISO_GEOID", "'484' + !CVE_MUN!", "PYTHON_9.3") elif out_data.endswith("10.dbf"): arcpy.CalculateField_management(out_data, "ISO_GEOID", "'484' + !MUN_OFICIA!", "PYTHON_9.3") dbfList_MX.append(out_data)
if arcpy.Exists(kd_result): txtFile.write("Deleting: {} - {}{}".format(kd_result, time.strftime("%X"), "\n")) arcpy.Delete_management(kd_result) else: arcpy.AddMessage("Using Existing Raster: {} - {}".format(OutputKDr, time.strftime("%X"))) txtFile.write("Using Existing Raster: {} - {}{}".format(OutputKDr, time.strftime("%X"), "\n")) if arcpy.Exists(OutputKDr): # arcpy.AlterAliasName(OutputKDr, KDrAlias) arcpy.Rename_management(OutputKDr, KDrAlias) if not arcpy.Exists(OutputKDv): arcpy.AddMessage("Slicing to Integer KD: {} - {}".format(OutputKDr, time.strftime("%X"))) txtFile.write("Slicing to Integer KD: {} - {}{}".format(OutputKDr, time.strftime("%X"), "\n")) # int_result = "{}_Tmp".format(OutputKDr) # arcpy.Int_3d(OutputKDr, int_result) int_result = arcpy.sa.Slice(OutputKDr, 21, "NATURAL_BREAKS", 0); # int_result.save("{}_Tmp".format(OutputKDr)) # int_result.save(OutputKDr) arcpy.AddMessage("Building KDv: {} - {}".format(OutputKDv,
# Renomeia os "FeatureClass" print '## Etapa 10: Renomeia os "FeatureClass"' workspace = os.path.join(Input, folder, 'Geodata', 'Geo_SiCAR.mdb') walk = arcpy.da.Walk(workspace, datatype="FeatureClass", type="All") for dirpath, dirnames, filenames in walk: for filename in filenames: parte1 = filename.split("_", 1)[0] print parte1 filefullpath = os.path.join(workspace, filename) if parte1 == 'Nascente': parte3 = filename.split("_", 3)[2] newname = os.path.join(workspace, parte1 + '_' + parte3) try: arcpy.Rename_management(filefullpath, newname) except OSError: pass else: newname = os.path.join(workspace, parte1) try: arcpy.Rename_management(filefullpath, newname) except OSError: pass # ------------------------------------------------------------------------------------------------------- # Finalizando arcpy.ResetEnvironments() print '# ' + '-' * 100 print '# End'
def Eras_Merge(inFC, outFC, eraseFC, mergeFC):
    """Replace inFC, in place, with outFC plus the non-overlapping remainder of inFC.

    inFC    -- feature class to update (deleted and recreated under the same name)
    outFC   -- features that take precedence where the two overlap
    eraseFC -- scratch path for the Erase result
    mergeFC -- scratch path for the Merge result (renamed to inFC at the end)
    """
    # Keep only the parts of inFC that fall outside outFC.
    arcpy.Erase_analysis(inFC, outFC, eraseFC)
    # Recombine that remainder with outFC.
    arcpy.Merge_management([eraseFC, outFC],mergeFC)
    # Remove the original so the merged result can take over its name.
    arcpy.Delete_management(inFC)
    #arcpy.CopyRows_management(mergeFC, inFC)
    arcpy.Rename_management(mergeFC, inFC, "FeatureClass")
def clean_name(x):
    """Rename dataset *x* in place, stripping "_Current" and replacing the
    botanical "var._" / "ssp._" infixes with underscore-safe forms.

    The new name is derived from *x*; arcpy.Rename_management performs the
    rename, so nothing is returned.
    """
    # Use a distinct local name — the original shadowed the function's own
    # name with its result variable, which is confusing and blocks recursion.
    new_name = x.replace("_Current", "")
    new_name = new_name.replace("var._", "var_")
    new_name = new_name.replace("ssp._", "ssp_")
    arcpy.Rename_management(x, new_name)
# Mask the TWI raster to the study area and stash it in the scratch workspace.
TWI_raster_masked = arcpy.sa.ExtractByMask(TWI_raster, mask)
TWI_out_str = "TWI"
TWI_raster_masked.save(arcpy.env.scratchWorkspace + "\\" + TWI_out_str)
master_list += [TWI_out_str]
print(TWI_out_str + ' has been created and saved')

"""____________CREATE MASTER RASTER____________"""

# Create the name of the master file which includes the resolution of the rasters
master_name_str = 'Master_' + str(new_res_value_float).replace(
    '.', '_') + 'm_dry_cover'
print(master_list)

# Add the scratch workspace string to the master list
master_list_scratch = []
for i in master_list:
    master_list_scratch += [arcpy.env.scratchWorkspace + "\\" + i]

# Combine all vegetation and topographic indices and the SMC validation
# dataset into 1 master raster file
arcpy.CompositeBands_management(master_list_scratch, master_name_str)

"""____________RENAME BANDS IN MASTER RASTER____________"""

# Rename each band after the index it was built from (bands are 1-based).
# BUG FIX: the rename target used r"\\" (two literal backslashes) while the
# source used a single backslash; both now use one path separator.
for band_index, band_name in enumerate(master_list, start=1):
    arcpy.Rename_management(master_name_str + "\\Band_" + str(band_index),
                            master_name_str + "\\" + band_name)
print("All bands in the master raster have been renamed")
print("Closing script....")
def rfpRoute(inputRFPSites):
    """Route RFP sites to hub / user facilities across the network and build
    a cleaned, attributed routes feature class.

    Returns the output routes name ('routes_<siteName>'), or the string
    "noHub" when no routing destination exists.

    Relies on module-level globals set up by the caller: userFacilities,
    scriptLocation, inputNetwork, outputLocation, locRemoteAssets,
    locFixedAssets, locNearTable, backRoutes, rfpGroup, siteName,
    siteNameField and the summaryCSV helper.
    """
    if userFacilities:
        ##-- User has provided a facilities layer to route to rather than a hub --##
        arcpy.AddMessage('User Facility layer has been provided...')
        arcpy.MakeFeatureLayer_management(userFacilities,'hubSites')
        hubNum = int(arcpy.GetCount_management('hubSites').getOutput(0))
        arcpy.AddMessage('Found ' + str(hubNum) + ' user facilities.')
    else:
        ##--- Select hub site ---##
        arcpy.MakeFeatureLayer_management(inputRFPSites,'hubSites')
        arcpy.SelectLayerByAttribute_management('hubSites','NEW_SELECTION','"Hub" = 1')
        hubNum = int(arcpy.GetCount_management('hubSites').getOutput(0))
        arcpy.AddMessage('Found ' + str(hubNum) + ' Hub Site')
    # Bail out early when nothing to route to.
    if hubNum < 1:
        return "noHub"
    ##--- Locate Assets to hub site against the network ---##
    arcpy.ImportToolbox(scriptLocation + os.sep + 'Backhaul' + os.sep + 'Backhaul.pyt')
    arcpy.AddMessage('Beginning Locate Assets...')
    arcpy.LocateAssets_backhaul(inputRFPSites,'hubSites',inputNetwork,outputLocation)
    arcpy.AddMessage('- Locate Assets completed successfully')
    ##--- Backhaul Optimization ---##
    arcpy.AddMessage('Creating Closest Facility layer...')
    backClosestFacility = arcpy.na.MakeClosestFacilityLayer(inputNetwork,"ClosestFacility","LENGTH","TRAVEL_TO","","","","ALLOW_UTURNS","","NO_HIERARCHY","","TRUE_LINES_WITH_MEASURES","","")
    arcpy.AddMessage('- Closest Facility layer created successfully')
    arcpy.ImportToolbox(scriptLocation + os.sep + 'Backhaul' + os.sep + 'Backhaul.pyt')
    arcpy.AddMessage('Beginning Backhaul Optimization...')
    arcpy.BackhaulAssets_backhaul(locRemoteAssets, locFixedAssets, locNearTable, backClosestFacility, outputLocation,"50","10","TRUE")
    arcpy.AddMessage('- Backhaul Optimization completed successfully')
    ##--- Cleanup the routes ---##
    # Split overlapping route segments apart, then de-duplicate them.
    arcpy.AddMessage('Cleaning up the routes...')
    arcpy.Intersect_analysis(backRoutes,'routes_intersected','ALL','','INPUT')
    arcpy.AddMessage('- Routes intersected.')
    arcpy.Erase_analysis(backRoutes,'routes_intersected','routes_erased','')
    arcpy.AddMessage('- Overlapping routes erased.')
    arcpy.Merge_management(['routes_intersected','routes_erased'],'routes_cleaned')
    arcpy.AddMessage('- Completed cleaning routes.')
    arcpy.DeleteIdentical_management('routes_cleaned','Shape','','')
    arcpy.AddMessage('- Duplicate features removed.')
    ##--- Determine which routes are new versus existing ---##
    # Identity against the fiber layer: segments coinciding with existing
    # fiber get Status 'E', everything else 'N'.
    arcpy.AddMessage('Determining New versus Existing routes...')
    arcpy.Identity_analysis('routes_cleaned','fiber','routes_identity','ONLY_FID')
    arcpy.MakeFeatureLayer_management('routes_identity','ident')
    selection = "\"FID_FIBERCABLE_forMultimodal\" <> -1"
    arcpy.SelectLayerByAttribute_management('ident',"NEW_SELECTION",selection)
    arcpy.AddField_management('ident','Status','Text',field_length=5)
    arcpy.CalculateField_management('ident','Status',"'E'",'PYTHON')
    arcpy.SelectLayerByAttribute_management('ident','CLEAR_SELECTION')
    selection = "\"FID_FIBERCABLE_forMultimodal\" = -1"
    arcpy.SelectLayerByAttribute_management('ident',"NEW_SELECTION",selection)
    arcpy.CalculateField_management('ident','Status',"'N'",'PYTHON')
    arcpy.SelectLayerByAttribute_management('ident','CLEAR_SELECTION')
    arcpy.Dissolve_management('ident','routes_dissolve',['FID_routes_cleaned','Status'])
    arcpy.MakeFeatureLayer_management('routes_dissolve','routes')
    arcpy.AddMessage('- Delineated existing routes from new routes.')
    #arcpy.DeleteField_management('routes',['FacilityID','FacilityRank','Name','IncidentCurbApproach','FacilityCurbApproach','IncidentID','Total_Length','startID','endID','startAsset','endAsset','startName','endName','FID_routes_cleaned','FID_FIBERCABLE_forMultimodal'])
    arcpy.DeleteField_management('routes',['FacilityID','FacilityRank','Name','IncidentCurbApproach','FacilityCurbApproach','IncidentID','Total_Length','startID','endID','startAsset','endAsset','startName','endName'])
    arcpy.AddMessage('- Removed unnecessary fields.')
    ##--- Copy the sites and facilities to the output gdb ---##
    arcpy.AddMessage('Finalizing output data...')
    arcpy.CopyFeatures_management(rfpGroup,'parent_sites_'+siteName)
    arcpy.AddMessage('- Copied RFP Sites.')
    arcpy.SelectLayerByLocation_management('hubSites','INTERSECT','routes')
    arcpy.CopyFeatures_management('hubSites','hubs_'+siteName)
    arcpy.AddMessage('- Copied CO Facilities.')
    ##--- Populate SiteName attribute for the Lateral Segments ---##
    arcpy.AddMessage('Calculating route attributes...')
    arcpy.AddField_management('routes','Site_Name','Text',field_length=255)
    arcpy.AddField_management('routes','Type','Text',field_length=5)
    arcpy.AddField_management('routes','Length_mi','FLOAT',field_scale=4)
    arcpy.AddField_management('routes','Route_Name','Text',field_length=255)
    arcpy.AddField_management('routes','FolderPath','Text',field_length=255)
    arcpy.AlterField_management('routes','FID_routes_cleaned','FID_Routes','FID_Routes')
    arcpy.MakeFeatureLayer_management('parent_sites_'+siteName,'sites')
    siteNum = int(arcpy.GetCount_management('sites').getOutput(0))
    # Tag each route with the site it touches; routes touching a site are
    # laterals ('L'), leftovers get 'SL' below.
    cursor = arcpy.SearchCursor('sites')
    for row in cursor:
        # Site names may be text or numeric; quote text values for the SQL clause.
        if isinstance(row.getValue(siteNameField),basestring):
            singleSiteName = "'" + row.getValue(siteNameField) + "'"
        else:
            singleSiteName = str(int(row.getValue(siteNameField)))
        selection = "\"" + siteNameField + "\" = " + singleSiteName
        arcpy.SelectLayerByAttribute_management('sites',"NEW_SELECTION",selection)
        arcpy.SelectLayerByLocation_management('routes','INTERSECT','sites')
        arcpy.CalculateField_management('routes','Site_Name',singleSiteName,'PYTHON')
        arcpy.CalculateField_management('routes','Type',"'L'",'PYTHON')
        arcpy.SelectLayerByAttribute_management('sites','CLEAR_SELECTION')
        arcpy.SelectLayerByAttribute_management('routes','CLEAR_SELECTION')
    selection = "\"Type\" Is NULL"
    arcpy.SelectLayerByAttribute_management('routes',"NEW_SELECTION",selection)
    arcpy.CalculateField_management('routes','Type',"'SL'",'PYTHON')
    arcpy.AddMessage('- Calculated Site Information and Lateral Type.')
    arcpy.SelectLayerByAttribute_management('routes','CLEAR_SELECTION')
    arcpy.CalculateField_management('routes','Length_mi',"round(!shape.length@miles!,4)","PYTHON_9.3")
    arcpy.AddMessage('- Calculated Route Segment Length.')
    arcpy.CalculateField_management("routes_dissolve","Route_Name","[FID_Routes]&\"^\" & [Type]&\"^\"& [Status]&\"^\"&[Length_mi]&\"^\"& [Site_Name]","VB")
    arcpy.AddMessage('- Calculated Route Name.')
    calcVal = "\"Fiber/"+siteName+"/\"& [Route_Name]"
    arcpy.CalculateField_management("routes_dissolve","FolderPath",calcVal,"VB")
    arcpy.AddMessage('- Calculated Folder Path.')
    ##--- Select by route type and total the length by type ---##
    summaryCSV(siteName,'routes',siteNum)
    outputRoute = 'routes_'+siteName
    arcpy.Rename_management('routes_dissolve',outputRoute)
    return outputRoute
def sbdd_ProcessAddress (myFD, myFL):
    """Generalize broadband address points into buffered coverage polygons.

    For every distinct provider/technology/speed-tier combination found in
    myFD/myFL (via a Frequency table), the matching points are either
    rasterized and converted back to polygons (dense cases, > 50 points)
    or buffered directly (sparse cases); results accumulate into a single
    "Address" feature class, which is exported to shapefile at the end.

    Relies on module-level globals: theST (state prefix), thePGDB (output
    workspace) and the sbdd_ExportToShape helper.
    """
    arcpy.AddMessage(" Begining Address Processing")
    theFields = ["FRN","PROVNAME","DBANAME","TRANSTECH","MAXADDOWN","MAXADUP",
                 "TYPICDOWN","TYPICUP","Provider_Type","ENDUSERCAT"]
    # Start from a clean slate: drop leftovers from a previous run.
    chkFC = ["Address_frq","Address"]
    for cFC in chkFC:
        if arcpy.Exists(cFC):
            arcpy.Delete_management(cFC)
    if int(arcpy.GetCount_management(myFD + "/" + myFL).getOutput(0)) > 1:
        arcpy.Frequency_analysis(myFD + "/" + myFL, "Address" + "_frq", theFields, "")
        #open a cursor loop to get all the distinct values
        myCnt = 1
        # Only rows whose coded max down/up speeds fall in the valid ranges
        # are processed.
        theQ = "(MAXADDOWN = '3' OR MAXADDOWN = '4' OR MAXADDOWN = '5' OR MAXADDOWN = '6' OR " + \
               " MAXADDOWN = '7' OR MAXADDOWN = '8' OR MAXADDOWN = '9' OR MAXADDOWN = '10' OR MAXADDOWN = '11') AND " + \
               "(MAXADUP = '2' OR MAXADUP = '3' OR MAXADUP = '4' OR MAXADUP = '5' OR MAXADUP = '6' OR " + \
               " MAXADUP = '7' OR MAXADUP = '8' OR MAXADUP = '9' OR MAXADUP = '10' OR MAXADUP = '11' )"
        for row in arcpy.SearchCursor("Address" + "_frq", theQ):
            theProviderType=row.getValue("Provider_Type")
            theEndUserCat=row.getValue("ENDUSERCAT")
            theProvName = row.getValue("PROVNAME").replace("'","")
            theDBA = row.getValue("DBANAME").replace("'","")
            theFRN = row.getValue("FRN")
            theTransTech = row.getValue("TRANSTECH")
            theAdUp = row.getValue("MAXADUP")
            theAdDown = row.getValue("MAXADDOWN")
            theTyUp = row.getValue("TYPICUP")
            theTyDown = row.getValue("TYPICDOWN")
            theTyUpQ = ""
            theTyDownQ = ""
            # Normalize the "typical up" code into a name fragment and a
            # selection clause (ZZ, NULL, blank, or a numeric code).
            if theTyUp == "ZZ":
                theTyUp = "ZZ" #used for naming / logic on calculating
                theTyUpQ = "TYPICUP = 'ZZ'" #used as a selection set
            elif theTyUp == None:
                theTyUp = "IsNull" #used for naming / logic on calculating
                theTyUpQ = "TYPICUP Is Null" #used as a selection set
            elif theTyUp == " ":
                theTyUp = "IsNull"
                theTyUpQ = "TYPICUP = ' '"
            else:
                theTyUp = str(abs(int(theTyUp)))
                theTyUpQ = "TYPICUP = '" + theTyUp + "'"
            # Same normalization for the "typical down" code.
            if theTyDown == "ZZ":
                theTyDown = "ZZ" #used for naming / logic on calculating
                theTyDownQ = "TYPICDOWN = 'ZZ'" #used as a selection set
            elif theTyDown == None:
                theTyDown = "IsNull"
                theTyDownQ = "TYPICDOWN Is Null"
            elif theTyDown == " ":
                theTyDown = "IsNull"
                theTyDownQ = "TYPICDOWN = ' '"
            else:
                theTyDown = str(abs(int(theTyDown)))
                theTyDownQ = "TYPICDOWN = '" + theTyDown + "'"
            # Select exactly the points belonging to this frequency combo.
            theQry = "FRN = '" + theFRN + "'"
            theQry = theQry + " AND TRANSTECH = " + str(theTransTech)
            theQry = theQry + " AND MAXADDOWN = '" + theAdDown + "' AND MAXADUP = '"
            theQry = theQry + theAdUp + "' AND " + theTyUpQ + " AND " + theTyDownQ
            myFLName = theFRN + str(theTransTech) + theAdUp + theAdDown + theTyUp + theTyDown
            arcpy.MakeFeatureLayer_management(myFD + "/" + myFL, myFLName, theQry)
            if int(arcpy.GetCount_management(myFLName).getOutput(0)) > 0 : #originally 1 for the raster case
                outPT = theST + theFRN + "_" + str(theTransTech) + "_" + theAdDown + "_" + \
                        theAdUp + "_" + theTyDown + "_" + theTyUp + "_x" #the selection of points
                outRT = theST + theFRN + "_" + str(theTransTech) + "_" + theAdDown + "_" + \
                        theAdUp + "_" + theTyDown + "_" + theTyUp + "_g" #the raster grid
                inPly = theST + theFRN + "_" + str(theTransTech) + "_" + theAdDown + "_" + \
                        theAdUp + "_" + theTyDown + "_" + theTyUp + "_p" #the output of grid poly
                bfPly = theST + theFRN + "_" + str(theTransTech) + "_" + theAdDown + "_" + \
                        theAdUp + "_" + theTyDown + "_" + theTyUp + "_pb" #the output of buffer
                chkFC = [outPT, outRT, inPly, bfPly]
                for cFC in chkFC:
                    if arcpy.Exists(cFC):
                        arcpy.Delete_management(cFC)
                del cFC, chkFC
                #first create a feature class of the selected points
                arcpy.FeatureClassToFeatureClass_conversion(myFLName, thePGDB, outPT)
                arcpy.RepairGeometry_management(outPT)
                arcpy.Delete_management(myFLName)
                if int(arcpy.GetCount_management(outPT).getOutput(0)) > 50:
                    #arcpy.AddMessage(" processing by raster point: " + outPT)
                    #second covert the selection to a grid data set (e.g. raster)
                    arcpy.PointToRaster_conversion(outPT, "FRN", outRT, "", "", 0.0028)
                    theH = arcpy.Describe(outRT).Height
                    theW = arcpy.Describe(outRT).Width
                    # Skip degenerate rasters (too small to polygonize usefully).
                    if int(theH) > 2 and int(theW) > 2:
                        #third convert the rasters back to a polygon
                        arcpy.RasterToPolygon_conversion(outRT, inPly, "NO_SIMPLIFY", "")
                        # Re-attach the tier attributes lost in the raster round-trip.
                        arcpy.AddField_management (inPly, "FRN", "TEXT", "", "", 10)
                        arcpy.AddField_management (inPly, "PROVNAME", "TEXT", "", "", 200)
                        arcpy.AddField_management (inPly, "DBANAME", "TEXT", "", "", 200)
                        arcpy.AddField_management (inPly, "TRANSTECH", "SHORT", "", "", "")
                        arcpy.AddField_management (inPly, "MAXADDOWN", "TEXT", "", "", 2)
                        arcpy.AddField_management (inPly, "MAXADUP", "TEXT", "", "", 2)
                        arcpy.AddField_management (inPly, "TYPICDOWN", "TEXT", "", "", 2)
                        arcpy.AddField_management (inPly, "TYPICUP", "TEXT", "", "", 2)
                        arcpy.AddField_management (inPly, "State", "TEXT", "", "", 2)
                        arcpy.AddField_management (inPly, "Provider_Type", "SHORT", "", "", "")
                        arcpy.AddField_management (inPly, "ENDUSERCAT", "TEXT", "", "", 2)
                        arcpy.CalculateField_management(inPly, "FRN", "'" + theFRN + "'" ,"PYTHON")
                        arcpy.CalculateField_management(inPly, "PROVNAME", r"'" + theProvName + "'" ,"PYTHON")
                        arcpy.CalculateField_management(inPly, "DBANAME", r"'" + theDBA + "'" ,"PYTHON")
                        arcpy.CalculateField_management(inPly, "TRANSTECH", theTransTech, "PYTHON")
                        arcpy.CalculateField_management(inPly, "MAXADDOWN", "'" + theAdDown + "'" ,"PYTHON")
                        arcpy.CalculateField_management(inPly, "MAXADUP", "'" + theAdUp + "'" ,"PYTHON")
                        #arcpy.AddMessage("theProvider_type: " + str(theProviderType))
                        if theTyDown <> "IsNull":
                            arcpy.CalculateField_management(inPly, "TYPICDOWN", "'" + theTyDown + "'" ,"PYTHON")
                        if theTyUp <> "IsNull":
                            arcpy.CalculateField_management(inPly, "TYPICUP", "'" + theTyUp + "'" ,"PYTHON")
                        arcpy.CalculateField_management(inPly, "State", "'" + theST + "'" ,"PYTHON")
                        arcpy.CalculateField_management(inPly, "Provider_Type", theProviderType,"PYTHON")
                        arcpy.CalculateField_management(inPly, "ENDUSERCAT", "'" + theEndUserCat + "'" ,"PYTHON")
                        #arcpy.AddMessage("theProvider_type: " + str(theProviderType))
                        arcpy.Buffer_analysis(inPly, bfPly, "100 Feet", "FULL", "ROUND", "LIST", theFields)
                        if myCnt == 1: #this is the first time through, rename the bfPly to Address
                            arcpy.Rename_management(bfPly,"Address")
                        else: #otherwise append it to the first one through
                            arcpy.Append_management([bfPly], "Address")
                    del theH, theW
                #then buffer them
                else:
                    arcpy.AddMessage(" processing by buffering: " + outPT)
                    arcpy.Buffer_analysis(outPT, bfPly, "500 Feet", "FULL", "ROUND", "LIST", theFields)
                    if myCnt == 1: #this is the first time through, rename the bfPly to Address
                        arcpy.Rename_management(bfPly,"Address")
                    else: #otherwise append it to the first one through
                        arcpy.Append_management([bfPly], "Address")
                # Drop the per-combination scratch datasets.
                chkFC = [outPT, outRT, inPly, bfPly]
                for cFC in chkFC:
                    if arcpy.Exists(cFC):
                        arcpy.Delete_management(cFC)
                del outPT, outRT, inPly, bfPly, cFC, chkFC
                myCnt = myCnt + 1
            del theProvName, theDBA, theFRN, theTransTech, theAdUp, theAdDown, theTyUp, \
                theTyUpQ, theTyDown, theTyDownQ, theQry, myFLName, theProviderType,theEndUserCat
        sbdd_ExportToShape("Address")
        arcpy.Delete_management("Address_frq")
        del row, myCnt, theFields, theQ, myFL, myFD
    return ()
import inspect

import arcpy

try:
    arcpy.env.workspace = r"C:\Users\spowell\AppData\Roaming\ESRI\Desktop10.6\ArcCatalog\localhost.sde"
    #
    # arcgis stuff for multi-users: block new connections and kick current
    # users so the renames can acquire an exclusive schema lock.
    arcpy.AcceptConnections(arcpy.env.workspace, False)
    arcpy.DisconnectUser(arcpy.env.workspace, "ALL")
    # Clear any leftover copy, then round-trip the rename.
    arcpy.Delete_management("boundary/county_parcel_1")
    arcpy.Rename_management("boundary/county_parcel", "boundary/county_parcel_1", "FeatureClass")
    arcpy.Rename_management("boundary/county_parcel_1", "boundary/county_parcel", "FeatureClass")
except Exception as e:
    print(f"{inspect.stack()[0][3]} {e}")
finally:
    # BUG FIX: connections were never re-enabled, leaving the enterprise
    # geodatabase closed to all users after the script finished (or failed).
    arcpy.AcceptConnections(arcpy.env.workspace, True)
'Extruded' ] for item in del_fields: arcpy.DeleteField_management(results_FC, item) #remove processing layers for lyr in arcpy.mapping.ListLayers(mxd, "", df): if lyr.name.split('_')[-1] == "Projected": arcpy.mapping.RemoveLayer(df, lyr) #find and remove problematic feature class names by appending to other FC's or renaming arcpy.env.workspace = results_dataset for FC in arcpy.ListFeatureClasses(): Append = False Rename = False with arcpy.da.SearchCursor(FC, ['Name', 'FolderPath']) as SC: row = next(SC) Slash_Count = row[0].count('/') if Slash_Count <> 0: FC_correct_name = results_dataset + '\\' + row[1].split("/")[ -1 - Slash_Count].replace(' ', '_') if arcpy.Exists(FC_correct_name): Append = True else: Rename = True if Append: arcpy.Append_management(FC, FC_correct_name) arcpy.Delete_management(FC) if Rename: arcpy.Rename_management(FC, FC_correct_name)
clean_mean = Int(1000 * Con( (Raster(bingmean_odd) < 2.0) & IsNull(Raster(logomask_odd)) & (Raster(traffic3am_ras) < 1.0) & (Raster(bingmean_even) < 2.0) & IsNull(Raster(logomask_even)) & (Raster(traffic2am_ras) < 1.0), (Raster(bingmean_odd) + Raster(bingmean_even)) / 2, Con((Raster(bingmean_odd) < 2.0) & IsNull(Raster(logomask_odd)) & (Raster(traffic3am_ras) < 1.0), Raster(bingmean_odd), Con((Raster(bingmean_even) < 2.0) & IsNull(Raster(logomask_even)) & (Raster(traffic2am_ras) < 1.0), Raster(bingmean_even))))) print('Done cleaning and averaging, running euclidean allocation') euc = arcpy.sa.EucAllocation(clean_mean, maximum_distance=50) euc.save(bingeuc) arcpy.Rename_management(bingeuc, outeuc) x += 1 #Mosaic all tiles arcpy.ResetEnvironments() if not arcpy.Exists(path.join(gdb, 'bingeuc1902')): arcpy.env.workspace = os.path.join(res) arcpy.MosaicToNewRaster_management( arcpy.ListRasters('bingeuc2*'), gdb, 'bingeuc1902', #coordinate_system_for_the_raster= arcpy.Describe(arcpy.ListRasters('bingeuc2*')[0]).SpatialReference, pixel_type='16_BIT_UNSIGNED', number_of_bands=1, mosaic_method='LAST')
# Filename: PrefixSuffix.py
# Purpose: Add a prefix and/or suffix to every dataset name in a workspace.
import arcpy, os

inws = arcpy.GetParameterAsText(0)
prefix = arcpy.GetParameterAsText(1)
suffix = arcpy.GetParameterAsText(2)

# Collect every dataset path up front so renaming cannot disturb the walk.
filelist = [
    os.path.join(top, fname)
    for top, dirnames, fnames in arcpy.da.Walk(inws)
    for fname in fnames
]

for dataset in filelist:
    base = os.path.splitext(os.path.basename(dataset))[0]
    target = os.path.join(inws, prefix + base + suffix)
    arcpy.Rename_management(dataset, target)
def fcs_in_workspace(InputFolder):
    """Copy 1991 census shapefiles into OutputFolder, split the income and
    dwelling layers into single-variable shapefiles, and rename them.

    Uses the module-level global OutputFolder as the destination.
    NOTE(review): fc.strip(".shp") strips any of the characters . s h p
    from BOTH ends of the name, not the extension (e.g. "shops.shp" ->
    "o") — os.path.splitext would be the safe form; verify input names
    before changing.
    """
    # Create output folder
    if not os.path.exists(OutputFolder):
        os.makedirs(OutputFolder)
    # Create loop for copy feature
    suffix = ".shp"
    for root, subFolders, files in os.walk(InputFolder):
        for fileName in files:
            if fileName.endswith(suffix):
                arcpy.env.workspace = root
                fclist = arcpy.ListFeatureClasses()
                # creating loop in fclist
                for fc in fclist:
                    fc_copy = os.path.join(OutputFolder, fc.strip(".shp"))
                    fc_path = os.path.join(OutputFolder, fc)
                    # must make a existing copy in order to describe
                    arcpy.CopyFeatures_management(fc, fc_copy)
                    desc = arcpy.Describe(fc_path)
                    if desc.name == "Income_EA.shp":
                        # make function variables
                        fc_copy1 = os.path.join(OutputFolder, fc.strip(".shp") + "_1")
                        # NOTE(review): fc_copy1 is already absolute, so this
                        # join resolves to fc_copy1 + ".shp" — confirm intended.
                        fc_path1 = os.path.join(OutputFolder, fc_copy1 + ".shp")
                        # copy features before changing original fc copy
                        # fc must be start with upper case character to copy feature correctly
                        arcpy.CopyFeatures_management(fc_path, fc_copy1)
                        # List the copy feature attribute fields
                        fields = arcpy.ListFields(fc_path)
                        # Manually enter field names to keep here
                        # include mandatory fields name in keepfields
                        keepFields = ["FID", "Shape", "EA", "EA_STR", "OID_", "COL_60"]
                        # Automatically drop fields
                        dropFields = [x.name for x in fields if x.name not in keepFields]
                        # Delete fields
                        arcpy.DeleteField_management(fc_path, dropFields)
                        # Rename
                        arcpy.Rename_management(fc_path, "Household_Average_Income_Income_1991_DA.shp")
                        # List the copy feature attribute fields
                        fields = arcpy.ListFields(fc_path1)
                        # Manually enter field names to keep here
                        # include mandatory fields name in keepfields
                        keepFields = ["FID", "Shape", "EA", "EA_STR", "OID_", "COL_54"]
                        # Automatically drop fields
                        dropFields = [x.name for x in fields if x.name not in keepFields]
                        # Delete fields
                        arcpy.DeleteField_management(fc_path1, dropFields)
                        # Rename
                        arcpy.Rename_management(fc_path1, "Low_Income_Income_1991_DA.shp")
                    elif desc.name == "Dwelling_households_B_EA.shp":
                        # make function variables
                        fc_copy2 = os.path.join(OutputFolder, fc.strip(".shp") + "_1")
                        fc_copy3 = os.path.join(OutputFolder, fc.strip(".shp") + "_2")
                        fc_path2 = os.path.join(OutputFolder, fc_copy2 + ".shp")
                        fc_path3 = os.path.join(OutputFolder, fc_copy3 + ".shp")
                        # copy feature
                        # fc must be start with upper case character to copy feature correctly
                        arcpy.CopyFeatures_management(fc, fc_copy2)
                        arcpy.CopyFeatures_management(fc, fc_copy3)
                        # List the copy feature attribute fields
                        fields = arcpy.ListFields(fc_path)
                        # Manually enter field names to keep here
                        # include mandatory fields name in keepfields
                        keepFields = ["FID", "Shape", "EA", "EA_STR", "OID_", "COL_9"]
                        # Automatically drop fields
                        dropFields = [x.name for x in fields if x.name not in keepFields]
                        # Delete fields
                        arcpy.DeleteField_management(fc_path, dropFields)
                        # Rename
                        arcpy.Rename_management(fc_path, "Dwellings_Average_Value_Dwellings_1991_DA.shp")
                        # List the copy feature attribute fields
                        fields = arcpy.ListFields(fc_path2)
                        # Manually enter field names to keep here
                        # include mandatory fields name in keepfields
                        keepFields = ["FID", "Shape", "EA", "EA_STR", "OID_", "COL_34"]
                        # Automatically drop fields
                        dropFields = [x.name for x in fields if x.name not in keepFields]
                        # Delete fields
                        arcpy.DeleteField_management(fc_path2, dropFields)
                        # Rename
                        arcpy.Rename_management(fc_path2, "Lone_Parent_Dwellings_1991_DA.shp")
                        # List the copy feature attribute fields
                        fields = arcpy.ListFields(fc_path3)
                        # Manually enter field names to keep here
                        # include mandatory fields name in keepfields
                        keepFields = ["FID", "Shape", "EA", "EA_STR", "OID_", "COL_51"]
                        # Automatically drop fields
                        dropFields = [x.name for x in fields if x.name not in keepFields]
                        # Delete fields
                        arcpy.DeleteField_management(fc_path3, dropFields)
                        # Rename
                        arcpy.Rename_management(fc_path3, "Live_Alone_Dwellings_1991_DA.shp")
import arcpy from arcpy import env env.workspace=r'S:\LV_Valley_Imagery\2017\SwimmingPool2017\gdb\review_data.gdb' env.overwriteOutput = True fcs = arcpy.ListFeatureClasses() for fc in fcs: if len(fc) == 20: print fc arcpy.Rename_management(fc, 'Review_' + fc[4:12])
field1 = "SCIEN_NAME" field = ["SCIEN_NAME"] values = [row[0] for row in arcpy.da.SearchCursor(fc, (field))] uniquevalues = set(values) sci_name = list(uniquevalues)[0] sqlcursor.execute( '''SELECT Code from tblInputs WHERE SCIEN_NAME=?''', (sci_name, )) test_one = sqlcursor.fetchone() print fc print test_one[0] except: print "There may be a problem grabbing the code for " + str(fc) try: root_name = fc[:-4] print root_name new_name = test_one[0] + ".shp" print new_name arcpy.Rename_management(fc, new_name) except: print "There may be a problem renaming " + str(fc) else: print "Features already encoded: " + str(fc) #Moving the files from the input folder into the EDM_Folder import Functions source = "C:\\Users\\Public\\2015Pros\\Renamed_No_Prep_Polys\\" dest = "C:\\Users\\Public\\2015Pros\\EDM_feature_polys\\" Functions.archive_originals(source, dest)
def execute(self, parameters, messages):
    """Merge per-scene dark-target feature classes by acquisition date.

    Reads three workspace paths from the tool parameters, merges every
    "*_toMerge"/"*_noOverlap" feature class that shares an acquisition
    date, dissolves attribute duplicates, resolves combined target IDs
    via the matching "*_TotalOverlap" feature class, and exports dates
    with a single feature class unchanged.

    Parameters (arcpy tool convention):
        parameters[0] -- overlap feature dataset workspace
        parameters[1] -- output geodatabase workspace
        parameters[2] -- dark-targets feature class workspace
    """
    arcpy.AddMessage("\nPerforming overall merge...")
    logging.info("Starting mergeAreas.py script...\n")
    # Define variables from parameters
    overlapWorkspace = parameters[0].valueAsText
    gdbWorkspace = parameters[1].valueAsText
    featWorkspace = parameters[2].valueAsText

    # Determine list of total overlap, no overlap and to merge feature
    # classes in overlap feature dataset workspace to process.
    arcpy.env.workspace = overlapWorkspace
    mergeList = arcpy.ListFeatureClasses("*_toMerge")
    totalOverlapList = arcpy.ListFeatureClasses("*_TotalOverlap")
    noOverlapList = arcpy.ListFeatureClasses("*_noOverlap")
    if len(mergeList) > 0:
        arcpy.AddMessage("Workspace contains the following " + str(len(mergeList)) + " feature classes to merge: " + str(mergeList))

    # Organize toMerge feature classes by date.
    # NOTE(review): names are assumed shaped like "<sat>_<date>_..." so
    # split("_")[1] yields the acquisition date — confirm against the
    # upstream naming convention. A later fc with the same date replaces
    # (not appends to) the earlier entry here.
    mergeDictbyDate = {}
    for fc in mergeList:
        fcPath = os.path.join(overlapWorkspace, fc)
        fcDate = fc.split("_")[1]
        mergeDictbyDate[fcDate] = [fcPath]
    # Append no overlap feature classes toMerge feature classes by date.
    # NOTE(review): raises KeyError if a "_noOverlap" date has no
    # corresponding "_toMerge" entry — presumably guaranteed upstream.
    for noOverlapFc in noOverlapList:
        noOverlapPath = os.path.join(overlapWorkspace, noOverlapFc)
        noOverlapDate = noOverlapFc.split("_")[1]
        mergeDictbyDate[noOverlapDate].append(noOverlapPath)

    # Organize dark targets feature classes by date (date -> [paths]).
    arcpy.env.workspace = featWorkspace
    fcList = arcpy.ListFeatureClasses()
    fcDictByDate = {}
    for fc in fcList:
        fcPath = os.path.join(featWorkspace, fc)
        fcSplit = fc.split("_")
        if fcSplit[1] in fcDictByDate:
            fcDictByDate[fcSplit[1]].append(fcPath)
        else:
            fcDictByDate[fcSplit[1]] = [fcPath]

    # Iterate through dark targets acquisition dates and check for
    # acquisition dates with more than a single feature class (for merging)
    for key in fcDictByDate:
        if len(fcDictByDate[key]) > 1:
            # Iterate through feature classes within acquisition date
            for fc in fcDictByDate[key]:
                fcSplit = fc.split("_")
                # Check for and add acquisition date toMerge feature classes if not already present
                if fcSplit[len(fcSplit) - 2] not in mergeDictbyDate:
                    mergeDictbyDate[fcSplit[len(fcSplit) - 2]] = [fc]
                # Check for and add feature class toMerge feature classes if not already present within acquisition date
                else:
                    fcValue = fc.split("\\")[len(fc.split("\\")) - 1] + "_noOverlap"
                    fcValuePath = os.path.join(overlapWorkspace, fcValue)
                    if fcValuePath not in mergeDictbyDate[key]:
                        mergeDictbyDate[key].append(fc)

    # Iterate through dark targets acquisition dates to compile lists of feature classes to merge
    for key in mergeDictbyDate:
        arcpy.AddMessage("\nMerging feature classes in " + key + "...")
        logging.info("Processing merges for acquisition date '%s'", key)
        mergeList = []
        # Iterate through feature classes within acquisition date and append them to merge list
        for item in mergeDictbyDate[key]:
            mergeList.append(item)
        # Merge feature classes in merge list into single feature class for the acquisition date
        outputDissolveString = "RS2_" + key + "_toDissolve"
        outputDissolve = os.path.join(overlapWorkspace, outputDissolveString)
        arcpy.Merge_management(mergeList, outputDissolve)
        logging.info(
            "Merge: '%s' created from merging the following feature classes: '%s'",
            outputDissolve, str(mergeList))

        # Dissolve attribute duplicates and rename fields
        arcpy.AddMessage("Dissolving...")
        dissolveLyr = "dissolveLyr"
        outputMergeString = "RS2_" + key + "_merged"
        outputMerge = os.path.join(gdbWorkspace, outputMergeString)
        dissolveFields = ["Pid", "RsatID"]
        fieldList = arcpy.ListFields(outputDissolve)
        statsFields = []
        # Every non-key, non-geometry field is carried through the
        # dissolve as FIRST_<name> and renamed back below.
        for field in fieldList:
            if "OBJECTID" in field.name or "FID" in field.name or "Shape" in field.name or field.name in dissolveFields or field.name == "ID":
                continue
            statsField = [field.name, "FIRST"]
            statsFields.append(statsField)
        arcpy.MakeFeatureLayer_management(outputDissolve, dissolveLyr)
        logging.info(
            "Make Feature Layer: '%s' layer created from '%s' feature class",
            dissolveLyr, outputDissolve)
        arcpy.Dissolve_management(dissolveLyr, outputMerge, dissolveFields, statsFields)
        logging.info(
            "Dissolve: '%s' feature class created from '%s' layer dissolve",
            outputMerge, dissolveLyr)
        # Strip the "FIRST_" prefix Dissolve added to the statistics fields.
        fieldList = arcpy.ListFields(outputMerge)
        for field in fieldList:
            if field.name.startswith("FIRST_"):
                newName = field.name[6:]
                arcpy.AlterField_management(outputMerge, field.name, newName)

        # Update targetID with combined target ID for overlapping features
        arcpy.AddMessage("Updating targetID...")
        finalOutputString = "RS2_" + key
        overlapBool = False
        # Iterate through total overlap feature classes
        for fc in totalOverlapList:
            # Check for merged acquisition date feature class containing overlapping
            # features (by finding equivalent total overlap feature class).
            # NOTE(review): str.strip("_TotalOverlap") removes any of the
            # characters 'T','o','t','a','l','O','v','e','r','p','_' from BOTH
            # ends — it is not a suffix removal and can over-strip names that
            # begin/end with those characters. Verify against actual fc names.
            if finalOutputString == fc.strip("_TotalOverlap"):
                overlapBool = True
                # Perform spatial join to access targetID field from total overlap feature class
                totalOverlapFc = os.path.join(overlapWorkspace, fc)
                finalOutput = os.path.join(gdbWorkspace, finalOutputString)
                fieldmappings = arcpy.FieldMappings()
                fieldmappings.addTable(outputMerge)
                # Map the overlap fc's targetID into the join output under
                # the name "targetID_1" so both IDs are available at once.
                fldmap_TARGETID = arcpy.FieldMap()
                fldmap_TARGETID.addInputField(totalOverlapFc, "targetID")
                fld_TARGETID = fldmap_TARGETID.outputField
                fld_TARGETID.name = "targetID_1"
                fldmap_TARGETID.outputField = fld_TARGETID
                fieldmappings.addFieldMap(fldmap_TARGETID)
                arcpy.SpatialJoin_analysis(outputMerge, totalOverlapFc, finalOutput, "#", "#", fieldmappings)
                logging.info(
                    "Spatial Join: '%s' feature class created by joining '%s' with '%s'",
                    finalOutput, outputMerge, totalOverlapFc)
                # Update targetID with combined targetID determined from total overlap feature class
                expression = "copyTargetID(!targetID!, !targetID_1!)"
                codeblock = """def copyTargetID(targetID, comb_targetID):
    if comb_targetID is None:
        return targetID
    else:
        return comb_targetID"""
                arcpy.CalculateField_management(finalOutput, "targetID", expression, "PYTHON_9.3", codeblock)
                logging.info(
                    "Calculate Field: 'targetID' field value calculated for '%s' feature class",
                    finalOutput)
                # Delete extraneous fields left by the spatial join
                arcpy.DeleteField_management(finalOutput, "targetID_1")
                arcpy.DeleteField_management(finalOutput, "Join_Count")
                arcpy.DeleteField_management(finalOutput, "TARGET_FID")

        # Rename merged acquisition date feature class to appropriate name if it does not contain overlapping targets
        if overlapBool is False:
            arcpy.Rename_management(outputMerge, finalOutputString)
            logging.info("Rename: '%s' feature class renamed to '%s'", outputMerge, finalOutputString)
        # Delete unneeded process outputs (dissolve and merge outputs)
        arcpy.Delete_management(outputDissolve)
        logging.info("Delete: '%s' feature class deleted", outputDissolve)
        if arcpy.Exists(outputMerge):
            arcpy.Delete_management(outputMerge)
            logging.info("Delete: '%s' feature class deleted", outputMerge)
        logging.info(
            "Processing for merges for acquisition date '%s' complete\n", key)

    # Iterate through dark targets acquisition dates to export single feature classes
    arcpy.AddMessage("\nExporting single feature classes...")
    logging.info("Processing single feature classes to export")
    for key in fcDictByDate:
        if len(fcDictByDate[key]) == 1:
            for fc in fcList:
                fcSplit = fc.split("_")
                # Dates that went through the merge path above are skipped.
                if fcSplit[1] in mergeDictbyDate:
                    continue
                else:
                    outputFeatureName = "RS2_" + fcSplit[1]
                    arcpy.FeatureClassToFeatureClass_conversion(
                        fc,
                        gdbWorkspace,
                        outputFeatureName,
                        "#",
                        "#",
                    )
                    logging.info(
                        "Feature Class to Feature Class: '%s' feature class converted to '%s'",
                        fc, outputFeatureName)
                    outputFeatPath = os.path.join(gdbWorkspace, outputFeatureName)
                    arcpy.DeleteField_management(outputFeatPath, "FID")
    logging.info("Processing of single feature classes to export complete")
    logging.info("mergeAreas.py script finished\n\n")
    return
import arcpy from arcpy import env env.workspace = r'D:\Clark_County_2017_ImageClassification_Project\book_161OtherReclass' fcs = arcpy.ListRasters() for fc in fcs: print fc arcpy.Rename_management(fc, fc[:-6] + '.tif')
def Extract_Temprature(gdbpath, raster_gdbpath):
    """Attach mean monthly temperature values to the eagle-owl points.

    Splits the point feature class by its 'Year_Month' attribute into an
    intermediate geodatabase, renames the monthly temperature rasters to a
    compact "T<year><month>" form, extracts raster values onto the split
    points, joins the per-month values back onto the original feature
    class, and derives 'MeanTemp' and 'MeanTemp_10' fields.

    Parameters:
        gdbpath -- geodatabase containing 'Eagle_Owl_Points_proj'
        raster_gdbpath -- workspace holding the monthly temperature rasters
    """
    # Set workspace
    workspace = os.path.join(gdbpath)
    # Input owl point feature class
    input_path_points = os.path.join(workspace, "Eagle_Owl_Points_proj")
    # Directory containing rasters to be renamed
    raster_workspace = os.path.join(raster_gdbpath)

    # Create unique id for joining data (recreated from OBJECTID each run)
    field_name = "ID"
    field_names = [f.name for f in arcpy.ListFields(input_path_points)]
    if field_name in field_names:
        print('exists')
        arcpy.DeleteField_management(input_path_points, [field_name])
    arcpy.AddField_management(input_path_points, field_name, "LONG", None,
                              None, None, None, "NULLABLE", "NON_REQUIRED", None)
    arcpy.CalculateField_management(input_path_points, field_name,
                                    '!OBJECTID!', "PYTHON_9.3")

    # Path to intermediate geodatabase to store intermediate results
    target_workspace = r'C:\TryOuts.gdb'
    # Create intermediate geodatabase, deleting any stale copy first
    if os.path.isdir(target_workspace):
        print('exists')
        arcpy.Delete_management(target_workspace)
    print("create tryouts")
    arcpy.CreateFileGDB_management("C:/", "TryOuts.gdb", "9.2")

    # Splitting data into feature classes on the basis of Year_Month field
    # to extract mean monthly temperature from rasters
    field = 'Year_Month'
    arcpy.analysis.SplitByAttributes(input_path_points, target_workspace, field)
    print('Feature spliting done....')

    # Rename temperature rasters according to the months and year
    arcpy.env.workspace = raster_workspace
    for raster in arcpy.ListRasters():
        # get the raster name and file extension
        fileName, fileExtension = os.path.splitext(raster)
        # fileNameParts like TAMM_01_2012_01_proj
        # fileNameParts[0] = TAMM
        # fileNameParts[1] = 01   (month)
        # fileNameParts[2] = 2012 (year)
        # fileNameParts[3] = 01
        # fileNameParts[4] = proj
        fileNameParts = fileName.split('_')
        if (len(fileNameParts) > 1):
            # Compact name becomes e.g. "T201201" (year then month)
            compactFileName = "T" + fileNameParts[2] + fileNameParts[
                1] + fileExtension
            arcpy.Rename_management(raster, compactFileName)
    print('Renaming rasters done')

    # Extract Temperature Values to Points (needs the Spatial Analyst license)
    if arcpy.CheckExtension('Spatial') == 'Available':
        arcpy.CheckOutExtension('Spatial')
    # Start a loop in the raster list and within that loop strip the "."
    # extension off the file name and replace with ".tif"
    # NOTE(review): replace(".", ".tif") rewrites EVERY dot in the name,
    # not just a trailing extension — confirm raster names contain at most
    # one dot, otherwise the Exists() lookup below will never match.
    arcpy.env.workspace = raster_workspace
    rasters = arcpy.ListRasters()
    for ras in rasters:
        tiff = ras.replace(".", ".tif")
        # Check to make sure the feature class name exists that corresponds
        # to the raster name
        if arcpy.Exists(os.path.join(target_workspace, tiff)):
            # Extract the raster values to the points
            arcpy.sa.ExtractMultiValuesToPoints(
                os.path.join(target_workspace, tiff), ras)
    arcpy.CheckInExtension('Spatial')
    print('extraction of temprature on each point done')

    # Adding mean temperature field to original point data
    new_field_name = 'MeanTemp'
    # Check if the field already exists
    Main_featureclass = input_path_points
    field_names = [f.name for f in arcpy.ListFields(Main_featureclass)]
    if new_field_name in field_names:
        print('exists')
        arcpy.DeleteField_management(Main_featureclass, [new_field_name])
    # Add field (if it not exists)
    arcpy.management.AddField(Main_featureclass, new_field_name, "LONG", None,
                              None, None, None, "NULLABLE", "NON_REQUIRED", None)

    # Store Year_Month data in numpy to get unique values list
    data = arcpy.da.TableToNumPyArray(Main_featureclass, 'Year_Month')
    uniq = numpy.unique(data['Year_Month'])
    querystring = ""
    for uniquemonth in uniq:
        # One joined column per month, named T<Year_Month>
        columnname = "T" + uniquemonth
        field_names = [f.name for f in arcpy.ListFields(Main_featureclass)]
        if columnname in field_names:
            print('exists')
            arcpy.DeleteField_management(Main_featureclass, [columnname])
        print(uniquemonth)
        # "\T" below is a literal backslash + T path fragment, not an escape.
        arcpy.JoinField_management(Main_featureclass, "ID",
                                   target_workspace + "\T" + uniquemonth,
                                   "ID", ["T" + uniquemonth])
        # Replace NULLs with '0' so the summed expression below is valid.
        # NOTE(review): the code block is a raw string, so the "\n" inside
        # it are literal backslash-n, not newlines — verify arcpy accepts
        # this form of the function definition.
        arcpy.management.CalculateField(
            Main_featureclass, columnname,
            "updateValue(!" + columnname + "!)", "PYTHON_9.3",
            r"def updateValue(value):\n if value == None:\n return '0'\n else: return value"
        )
        # Accumulate "+!Tyyyymm!" terms for the final sum expression.
        querystring = querystring + "+!" + columnname + "!"
    # Drop the leading "+" from the accumulated expression.
    querystring = querystring[1:]
    print("data joining with original dataset is done")

    # Merge all temperatures in one column
    arcpy.management.CalculateField(Main_featureclass, new_field_name,
                                    querystring, "PYTHON_9.3", None)
    # Delete joined multiple fields from original dataset
    for uniquemonth in uniq:
        columnname = "T" + uniquemonth
        field_names = [f.name for f in arcpy.ListFields(Main_featureclass)]
        if columnname in field_names:
            print('exists')
            arcpy.DeleteField_management(Main_featureclass, [columnname])

    # Create new field to divide extracted temperature according to
    # metadata of temperature rasters (values are stored as tenths).
    new_field_name1 = 'MeanTemp_10'
    # Check if the field already exists
    field_names = [f.name for f in arcpy.ListFields(Main_featureclass)]
    if new_field_name1 in field_names:
        print('exists')
        arcpy.DeleteField_management(Main_featureclass, [new_field_name1])
    # Add field (if it not exists)
    arcpy.management.AddField(Main_featureclass, new_field_name1, "DOUBLE",
                              None, None, None, None, "NULLABLE",
                              "NON_REQUIRED", None)
    # Copy the summed value, then divide by 10 in place.
    arcpy.management.CalculateField(Main_featureclass, new_field_name1,
                                    "!" + new_field_name + "!", "PYTHON_9.3",
                                    None)
    arcpy.management.CalculateField(Main_featureclass, new_field_name1,
                                    "!" + new_field_name1 + "!/10",
                                    "PYTHON_9.3", None)
# -*- coding: utf-8 -*-
# Rename each "B_<NR>" feature class in the Fredrikstad geodatabase to the
# zone name (NAVN) looked up from the Levekarssoner table, with spaces
# replaced by underscores so the result is a valid feature class name.
import arcpy
from arcpy import env

arcpy.env.workspace = r"C:/3Dbymodell/fredrikstad.gdb"
soner = r"C:/3Dbymodell/ToolData/ToolData.gdb/Levekarssoner"

# Build the NR -> NAVN lookup table.
navner = {}
cursor = arcpy.SearchCursor(soner, fields="NAVN, NR", sort_fields="NR A")
for row in cursor:
    navner[row.getValue("NR")] = row.getValue("NAVN").replace(" ", "_")
del cursor  # release the cursor's lock on the source table

fcs = arcpy.ListFeatureClasses()
for fc in fcs:
    # BUG FIX: fc.strip("B_") strips ANY leading/trailing 'B' or '_'
    # characters (e.g. "B_12B" -> "12"), not the "B_" prefix. Remove the
    # prefix explicitly so trailing characters are never eaten.
    zone_nr = fc[len("B_"):] if fc.startswith("B_") else fc
    arcpy.Rename_management(fc, navner[int(zone_nr)])
def _trim_and_rename(fc_path, keep_fields, new_name):
    # Drop every attribute field not listed in keep_fields, then rename
    # the shapefile to its final census-variable name.
    fields = arcpy.ListFields(fc_path)
    drop_fields = [f.name for f in fields if f.name not in keep_fields]
    arcpy.DeleteField_management(fc_path, drop_fields)
    arcpy.Rename_management(fc_path, new_name)


def fcs_in_workspace(InputFolder):
    """Copy census shapefiles out of InputFolder, trim their attribute
    tables to the mandatory + variable fields, and rename them.

    Walks InputFolder recursively; every shapefile found is copied into
    the module-level OutputFolder, then the two known source layers are
    reduced to their keep-field lists and renamed.

    Parameters:
        InputFolder -- root directory to scan for .shp files
    Side effects:
        Creates OutputFolder (module-level global) if missing; writes,
        modifies and renames shapefiles inside it.
    """
    # Create output folder
    if not os.path.exists(OutputFolder):
        os.makedirs(OutputFolder)

    suffix = ".shp"
    for root, subFolders, files in os.walk(InputFolder):
        for fileName in files:
            if not fileName.endswith(suffix):
                continue
            arcpy.env.workspace = root
            for fc in arcpy.ListFeatureClasses():
                # BUG FIX: fc.strip(".shp") strips any of the characters
                # '.', 's', 'h', 'p' from BOTH ends of the name (e.g.
                # "maps.shp" -> "ma"), silently corrupting copy targets.
                # os.path.splitext removes only the real extension.
                fc_copy = os.path.join(OutputFolder, os.path.splitext(fc)[0])
                fc_path = os.path.join(OutputFolder, fc)
                # fc must be start with upper case character to copy feature correctly
                arcpy.CopyFeatures_management(fc, fc_copy)

                # Describe copy featured shapefiles
                desc = arcpy.Describe(fc_path)
                if desc.name == "Income_EA.shp":
                    # Mandatory fields plus the income variable column.
                    _trim_and_rename(
                        fc_path,
                        ["FID", "Shape", "EA", "EA_STR", "OID_", "COL_60"],
                        "Household_Average_Income_Income_1991_DA.shp")
                elif desc.name == "Education_income_CT.shp":
                    # Mandatory fields plus the education variable column.
                    _trim_and_rename(
                        fc_path,
                        ["FID", "Shape", "AREA", "PERIMETER", "G91CT0_",
                         "G91CT0_ID", "CA", "CT_NAME", "PROV", "CA_CTNAME",
                         "OID_", "COL160"],
                        "Household_Average_Income_Education_1991_CT.shp")
Version 0.1
Created by: Juel Paul/Land Analytical
Date: February 03, 2020
--------------------------------------------------------------------------------"""
# Import modules
import arcpy
from arcpy import env
import os

# Tool parameters: the geodatabase to process and the substring to strip
# from feature class names.
inGDB = arcpy.GetParameterAsText(0)
user_string = arcpy.GetParameterAsText(1)

# Set workspace and environment variables
arcpy.env.workspace = inGDB
arcpy.env.overwriteOutput = True

fc_list = arcpy.ListFeatureClasses(feature_type='Polygon')

for fc in fc_list:
    desc = arcpy.Describe(fc)
    ftr_name = desc.Name
    if user_string in ftr_name:
        # Keep only the text after the LAST occurrence of user_string.
        # NOTE(review): if user_string appears mid-name, everything before
        # it is discarded too — confirm that is the intended behaviour.
        out_filename = ftr_name.split(user_string)[-1]
        arcpy.Rename_management(ftr_name, out_filename)
        arcpy.AddMessage("This is name: '{0}' and '{1}'".format(
            ftr_name, out_filename))
    else:
        arcpy.AddMessage("User string not present for '{0}'".format(ftr_name))
# Prefix every feature class in the palaeo-atlas geodatabase with a
# plate identifier so downstream tooling can group them by plate.
import arcpy

print("Program starting...")

# Workspace holding the redigitised Ziegler atlas feature classes.
arcpy.env.workspace = r"S:\PROGRAMMES\Globe_2021\ConfidencePolygons\PalaeoAtlases\Atlases\Ziegler_WTethys_Atlas\ZIegler_redigitised_paleo.gdb"

for fc in arcpy.ListFeatureClasses():
    prefixed_name = "_15427_plate_0_" + fc
    arcpy.Rename_management(fc, prefixed_name, "FeatureClass")

print("Program Complete.")
# -*- coding: cp936 -*- #导入arcpy模块 ''' ''' import arcpy arcpy.env.workspace = r"G:\治多土地利用成果_拓扑检查结果汇总(1月16号)\0擦除\治多合并结果\merge" fcs = arcpy.ListFeatureClasses("*","All","") for fc in fcs: print fc #length = len(str(fc)) New_Name = str(fc[:-10]) #7 print New_Name arcpy.Rename_management(fc,New_Name) print "" print "finish"
logging.basicConfig(filename=log_file_path, level=logging.DEBUG) # arcpy.env.workspace = temp_dir # set temporary files directory (used when calling Reclassify(), etc.), could use arcpy.env.workspace = "in_memory" ## Delete if existing and create temp directory # if os.path.exists(temp_dir): # shutil.rmtree(temp_dir) # os.makedirs(temp_dir) # arcpy.env.workspace = temp_dir # set temporary files directory (used when calling Reclassify(), etc.) for idx_zone, zone in enumerate(params['zones']): for idx_subzone, subzone in enumerate(params['subzones']): UTMz_name = '%d%s' % (zone, subzone) map_dir = os.path.join(UTM_dir, 'UTM_' + UTMz_name, 'YAI') for old_name, new_name in params['renaming_dict'].iteritems(): # print('\t\t %s' % targ) try: file_key = os.path.join(map_dir, '*_' + old_name + '_YAI_2010.dat') map_path_old = glob.glob(file_key)[0] map_path_new = map_path_old.replace(old_name, new_name) except: logging.info('No such file: %s' % (file_key)) continue arcpy.Rename_management(map_path_old, map_path_new)
# Build source/target geodatabase paths from module-level configuration.
# (mainpath, original_gdbname, out_gdbname, muban_* lists and excel_df are
# defined earlier in the script.)
origianlgdb_path = mainpath + original_gdbname
outputgdbpath = mainpath + out_gdbname
# A file geodatabase is a folder on disk, so shutil can copy/remove it.
if arcpy.Exists(outputgdbpath):
    shutil.rmtree(outputgdbpath)
print('正在复制gdb')
shutil.copytree(origianlgdb_path, outputgdbpath)

# Begin processing the copied geodatabase.
arcpy.env.workspace = outputgdbpath
featuredatasets_list = arcpy.ListDatasets()
print('gdb包含要素类:', featuredatasets_list)
# Example dataset list: ['LcrDataset', 'StrDataset', 'TraDataset']
for dataset in featuredatasets_list:
    # Only process datasets present in the template code list.
    if dataset in muban_featuredatasetcode_list:
        featurelist = arcpy.ListFeatureClasses(feature_dataset=dataset)
        print('\n', "开始处理要素类:", dataset, '包括要素:', featurelist)
        for featurecode in featurelist:
            # Look up the feature code in the Excel mapping table
            # (column '数据代码' = data code).
            select = excel_df.loc[excel_df['数据代码'] == featurecode]
            if select.empty is not True:
                # Human-readable feature name taken from the third column.
                featurename = select.iloc[0, 2]
                print('正在修改要素:', featurename, featurecode)
                # Rename the feature class from its code to its name.
                arcpy.Rename_management(featurecode, featurename)
        # Rename the feature dataset itself, code -> template name
        # (name list is positionally aligned with the code list).
        featuredataset_code = dataset
        featuredataset_name = muban_featuredatasetname_list[
            muban_featuredatasetcode_list.index(dataset)]
        print('正在修改要素类:', featuredataset_name, featuredataset_code)
        arcpy.Rename_management(featuredataset_code, featuredataset_name)
print("清洗完成!!!")