def update_layer(layer, file_path, verify=False):
    """
    Repoint every sub-layer of an Arc layer at a new datasource and
    refresh feature-class extents.

    :param layer: An Arc Layer object to be updated.
    :param file_path: A new datasource.  For a .tif the containing
        directory is used as the workspace, not the file itself.
    :param verify: If true will validate the datasource after the layer
        is updated.
    :return: None.
    """
    for lyr in arcpy.mapping.ListLayers(layer):
        if lyr.supports("DATASOURCE"):
            try:
                # Use the module logger consistently (the original mixed
                # the root `logging` module with `logger`).
                logger.debug("layer: {0}".format(lyr))
                logger.debug("removing old layer workspacePath: {0}".format(lyr.workspacePath))
            except Exception:
                # Skip layers that don't have paths.
                continue
            # Try to update the extents based on the layers.
            logger.debug("Updating layers from {0} to {1}".format(lyr.workspacePath, file_path))
            # For the tif file we want the workspace path to be the
            # directory not the DB name.
            if os.path.splitext(file_path)[1] == '.tif':
                lyr.replaceDataSource(os.path.dirname(file_path), "NONE", os.path.basename(file_path))
            else:
                lyr.findAndReplaceWorkspacePath(lyr.workspacePath, file_path, verify)
            if lyr.isFeatureLayer:
                # Stored extents can be stale after a datasource swap.
                # (The original wrapped this in `except AttributeError:
                # raise`, a no-op; AttributeError still propagates.)
                logger.debug(arcpy.RecalculateFeatureClassExtent_management(lyr).getMessages())
def get_temp_mxd(gpkg):
    """
    Yield the path to a temporary copy of the template MXD whose layers
    have been repointed at ``gpkg``.

    The data frame extent is taken from the last successfully updated
    feature layer (excluding "main.boundary").  Both temporary files
    (the raw NamedTemporaryFile and its ".mxd" copy) are removed when
    the caller is finished -- the original leaked them because they were
    created with ``delete=False`` and never unlinked.

    :param gpkg: Path to the GeoPackage to use as the new datasource.
    """
    temp_file = tempfile.NamedTemporaryFile(mode='rb', delete=False)
    temp_file.close()
    raw_path = temp_file.name  # keep the extension-less path for cleanup
    temp_file.name = "{0}.mxd".format(temp_file.name)
    try:
        template_file = os.path.abspath(os.path.join(BASE_DIR, "eventkit_arcgis_service", "static", "template.mxd"))
        logger.debug('Opening MXD: {0}'.format(template_file))
        shutil.copyfile(template_file, temp_file.name)
        mxd = arcpy.mapping.MapDocument(temp_file.name)
        ext = None
        for lyr in arcpy.mapping.ListLayers(mxd):
            logger.debug(lyr)
            try:
                # Use the logger instead of bare print statements so the
                # trace output is consistent with the rest of the function.
                logger.debug(lyr.workspacePath)
            except Exception:
                # Skip layers without a workspace path.
                continue
            try:
                lyr.findAndReplaceWorkspacePath(lyr.workspacePath, gpkg, True)
                if lyr.isFeatureLayer and lyr.name != "main.boundary":
                    arcpy.RecalculateFeatureClassExtent_management(lyr)
                    ext = lyr.getExtent()
            except Exception as e:
                logger.debug(str(e))
        logger.debug('Getting dataframes...')
        df = arcpy.mapping.ListDataFrames(mxd, "Layers")[0]
        df.extent = ext
        mxd.save()
        del mxd  # remove handle on file
        yield temp_file.name
    finally:
        temp_file.close()
        # Fix: remove both temp files (delete=False means nobody else will).
        for path in (temp_file.name, raw_path):
            try:
                os.remove(path)
            except OSError:
                pass
def copy_layer_over_PR(input_connection, output_connection, target_db, pub_layerfullname): print "Entering Copy Projection----" # Configure Connections input_connection = input_connection + '\\' + current_db + '.' + pub_layerfullname + '_PR' output_connection = output_connection + '\\' + target_db + '.' + pub_layerfullname # Set workspace and keyword arcpy.env.workspace = output_connection arcpy.env.configKeyword = "Geometry" # Copy Over try: arcpy.Copy_management(input_connection, output_connection) print "Layer successfully copied to " + output_connection print pub_layerfullname + " has successfully been published to its target destination of " + target_db + " on " + pub_targetdb_type + "." except Exception as error_copy_layer_over_PR_step1: print "Status: Failure!" print(error_copy_layer_over_PR_step1.args[0]) try: arcpy.RecalculateFeatureClassExtent_management(output_connection) except Exception as error_copy_layer_over_PR_step2: print "Status: Failure!" print(error_copy_layer_over_PR_step2.args[0]) print "Leaving Copy Projection----" return
def copy_layer_over(input_connection, output_connection, target_db, pub_layerfullname): print "Entering Copy Layer----" # Configure Connections input_connection = input_connection + '\\' + current_db + '.' + pub_layerfullname output_connection = output_connection + '\\' + target_db + '.' + pub_layerfullname # Set workspace and keyword arcpy.env.workspace = output_connection arcpy.env.configKeyword = "Geometry" # Copy Over try: arcpy.Copy_management(input_connection, output_connection) print "Layer successfully copied to " + output_connection except Exception as error_copy_layer_over_step1: print "Status: Failure!" print(error_copy_layer_over_step1.args[0]) try: arcpy.RecalculateFeatureClassExtent_management(output_connection) except Exception as error_copy_layer_over_step2: print "Status: Failure!" print(error_copy_layer_over_step2.args[0]) print "Leaving Copy Layer----" return
def recalculate_extent(fc):
    """Refresh the stored extent of feature class ``fc``.

    The RecalculateFeatureClassExtent tool needs a Standard (ArcEditor)
    or Advanced (ArcInfo) license; without one, fall back to a
    compress/uncompress round-trip -- presumably to force ArcGIS to
    rebuild the extent as a side effect.
    """
    licensed = any(arcpy.CheckProduct(level) == "Available"
                   for level in ("ArcInfo", "ArcEditor"))
    if licensed:
        arcpy.RecalculateFeatureClassExtent_management(fc)
    else:
        arcpy.CompressFileGeodatabaseData_management(fc)
        arcpy.UncompressFileGeodatabaseData_management(fc)
def update_layer(layer, file_path, type, projection=None, verify=False):
    """
    :param layer: An Arc Layer object to be updated.
    :param file_path: A new datasource.
    :param type: Datasource kind.  "raster" (non-gpkg) and "elevation"
        use replaceDataSource; anything else (including raster/vector
        gpkg) swaps the workspace path.  NOTE: shadows the builtin
        ``type`` -- kept for caller compatibility.
    :param projection: Unused in this function -- kept for caller
        compatibility.
    :param verify: If true will validate the datasource after the layer is
        updated.
    :return: None.
    """
    for lyr in arcpy.mapping.ListLayers(layer):
        try:
            if lyr.supports("DATASOURCE"):
                try:
                    logger.debug("layer: {0}".format(lyr))
                    logger.debug(
                        "removing old layer workspacePath: {0}".format(
                            lyr.workspacePath))
                except Exception:
                    # Skip layers that don't have paths.
                    continue
                try:
                    # Try to update the extents based on the layers
                    logger.debug("Updating layers from {0} to {1}".format(
                        lyr.workspacePath, file_path))
                    if type == "raster" and os.path.splitext(
                            file_path)[1] != ".gpkg":
                        # Non-gpkg raster: point at the containing folder
                        # as a raster workspace.
                        logger.debug("Replacing Datasource")
                        lyr.replaceDataSource(os.path.dirname(file_path),
                                              "RASTER_WORKSPACE",
                                              os.path.basename(file_path),
                                              verify)
                    elif type == "elevation":
                        logger.debug("updating elevation")
                        lyr.replaceDataSource(os.path.dirname(file_path),
                                              "NONE",
                                              os.path.basename(file_path),
                                              verify)
                    else:
                        logger.debug("updating raster or vector gpkg")
                        logger.debug("Replacing WorkSpace Path")
                        lyr.findAndReplaceWorkspacePath(
                            lyr.workspacePath, file_path, verify)
                    if lyr.isFeatureLayer:
                        # Stored extents can be stale after a datasource swap.
                        logger.debug(
                            arcpy.RecalculateFeatureClassExtent_management(
                                lyr).getMessages())
                except Exception as e:
                    # Deliberately swallow the error so the remaining
                    # layers are still processed.
                    print(arcpy.GetMessages(1))
                    print(arcpy.GetMessages(2))
                    logger.error(e)
                    # raise
        finally:
            # Drop the arcpy layer handle each iteration.
            del lyr
def recalc_extents(conn_string, fc_target_fullnamewdb):
    """Recalculate the stored extent of one feature class.

    :param conn_string: Workspace/connection path.
    :param fc_target_fullnamewdb: Fully-qualified feature class name
        (DB-prefixed).
    """
    print(" Calculating Extent: {0}".format(fc_target_fullnamewdb))
    target = conn_string + '\\' + fc_target_fullnamewdb
    try:
        arcpy.RecalculateFeatureClassExtent_management(target)
    except Exception as error_extents:
        print(" --Status: Failure!\n")
        print(error_extents.args[0])
    else:
        print(" --Status: Success!\n")
    return
"VB") #arcpy.CalculateField_management(ssReportLayer,"SMARTscan_Report.Overall_Condition_Score_for_Drain","[CSV_Out_modified.csv.Overall Condition Score for Drain]","VB") #arcpy.CalculateField_management(ssReportLayer,"SMARTscan_Report.SERV_GRADE","[CSV_Out_modified.csv.Overall Condition Score for Drain]","VB") #arcpy.CalculateField_management(ssReportLayer,"SMARTscan_Report.Road","[CSV_Out_modified.csv.Road]","VB") #arcpy.CalculateField_management(ssReportLayer,"SMARTscan_Report.LOC_ST","[CSV_Out_modified.csv.Road]","VB") arcpy.CalculateField_management(ssReportLayer, "SMARTscan_Report.Photo_One", "[newCSV.csv.Photo\nOne]", "VB") #arcpy.CalculateField_management(ssReportLayer,"SMARTscan_Report.Photo_Two","[CSV_Out_modified.csv.Photo\nTwo]","VB") #arcpy.CalculateField_management(ssReportLayer,"SMARTscan_Report.Photo_Three","[CSV_Out_modified.csv.Photo\nThree]","VB") print "\nInfo: Field calculation is completed" arcpy.RemoveJoin_management(ssReportLayer) # Recalculate SMARTscan Extents arcpy.RecalculateFeatureClassExtent_management(ssReport) print "\nInfo: Recalculation for feature class extension is completed" # Remove intermediate layers if arcpy.Exists(startEndtemp) == True: arcpy.Delete_management(startEndtemp) if arcpy.Exists(ssTrace) == True: arcpy.Delete_management(ssTrace) del ssReportLayer del startEnd del startEN del endEN del arcpy
def limiting_planes(point_file, grid_dem, processing_extent, cellSize):
    """Build a raster-shaped matrix of limiting-plane elevations from a
    point layer carrying dip / dip-direction attributes.

    For each (selected) point a plane is computed by ``calculate_plane``
    and the cell-wise maximum over all planes is kept; cells touched by
    no plane become NaN.

    :param point_file: Point feature class or layer (selection honoured).
    :param grid_dem: numpy array of the DEM (its shape sizes the output).
    :param processing_extent: Extent passed through to calculate_plane.
    :param cellSize: Cell size passed through to calculate_plane.
    :return: numpy array (same shape as grid_dem, flipped vertically).

    NOTE(review): relies on module-level names ``verbose``, ``saveInGDB``,
    ``ws``, ``dem_desc`` and helper ``calculate_plane``.  Indentation was
    reconstructed from a collapsed source -- verify block nesting.
    """
    # Best-effort extent refresh; ignored if the license/tool is unavailable.
    try:
        arcpy.RecalculateFeatureClassExtent_management(point_file)
    except:
        pass
    point_desc = arcpy.Describe(point_file)
    try:
        # Retrieve the selected points (works if the input is a layer)
        point_set = point_desc.FIDSet
    except:
        #makes a layer first if the input is a file
        if arcpy.GetInstallInfo()['ProductName'] == 'Desktop':
            point_file = arcpy.mapping.Layer(point_file)
        else:
            #mask_file = arcpy.mp.Layer(mask_file)
            point_file = arcpy.MakeFeatureLayer_management(point_file)
        point_desc = arcpy.Describe(point_file)
        # Retrieve the selected points
        point_set = point_desc.FIDSet
    if len(point_set) > 0:
        # FIDSet is a ';'-separated string of object IDs; turn it into ints.
        point_set = point_set.split(';')
        point_set = [int(x) for x in point_set]
    if point_desc.shapeType != "Point":
        arcpy.AddError('The limiting planes layer must be a point layer')
    else:
        # Check if the points and DEM are in the same SRC. Reproject the points if necessary
        src_point = point_desc.extent.spatialReference.name
        src_dem = arcpy.env.outputCoordinateSystem.name
        if verbose:
            arcpy.AddMessage('SRC point: {}, SRC dem: {}, src are equal: {}'.format(src_point, src_dem, src_point == src_dem))
        if src_point != src_dem:
            # Generate a random name for intermediate data (to avoid conflict
            # if previous intermediate data weren't correctly deleted)
            lower_alphabet = string.ascii_lowercase
            random_string = ''.join(random.choice(lower_alphabet) for i in range(5))
            name_temp = 'point_temp_' + random_string
            if saveInGDB:
                point_file_temp = os.path.join(ws, name_temp)
            else:
                point_file_temp = os.path.join(ws, name_temp + '.shp')
            arcpy.management.Project(point_file, point_file_temp, dem_desc.spatialReference)
            arcpy.AddMessage('Point file reprojected in the spatial reference of the DEM')
            point_file = point_file_temp
            # Re-wrap the reprojected file as a layer for the cursor below.
            if arcpy.GetInstallInfo()['ProductName'] == 'Desktop':
                point_file = arcpy.mapping.Layer(point_file)
            else:
                point_file = arcpy.MakeFeatureLayer_management(point_file)
            point_desc = arcpy.Describe(point_file)
    oid_fieldname = point_desc.OIDFieldName
    #create empty plane matrix
    s2 = grid_dem.shape
    planes = np.ones(s2)
    # Start from -inf so any computed plane wins the element-wise max below.
    planes = -np.inf * planes
    dip_fieldname = None
    dipdir_fieldname = None
    point_file_fields = [f.name for f in arcpy.ListFields(point_file)]
    # Accepted (case-insensitive) field names for dip and dip direction.
    dip_name_allowed = ['dip_angle', 'dip angle', 'angle', 'slope', 'dip']
    dipdir_name_allowed = ['dir', 'direction', 'azimut', 'azi', 'dipdir', 'dip_dir', 'dip_direction', 'dip direction']
    for name in point_file_fields:
        if name.lower() in dip_name_allowed:
            dip_fieldname = name
    for name in point_file_fields:
        if name.lower() in dipdir_name_allowed:
            dipdir_fieldname = name
    if dip_fieldname is None:
        arcpy.AddError('Dip field not found. Available fields {}. Accepted fieldnames: {}'.format(', '.join(point_file_fields), ', '.join(dip_name_allowed)))
    if dipdir_fieldname is None:
        arcpy.AddError('Dip direction field not found. Available fields {}. Accepted fieldnames: {}'.format(', '.join(point_file_fields), ', '.join(dipdir_name_allowed)))
    if verbose:
        arcpy.AddMessage('Dip field name: {}'.format(dip_fieldname))
        arcpy.AddMessage('Dip direction field name: {}'.format(dipdir_fieldname))
    fields = [oid_fieldname, 'SHAPE@', dipdir_fieldname, dip_fieldname]
    rows = arcpy.da.SearchCursor(point_file, fields)
    if len(point_set) > 0:
        # Only points whose OID is in the selection contribute a plane.
        for row in rows:
            feat = row[0]
            if int(feat) in point_set:
                plane_temp = calculate_plane(row, s2, point_desc.hasZ, processing_extent, cellSize)
                # Keep the element-wise maximum over all planes.
                planes[plane_temp > planes] = plane_temp[plane_temp > planes]
            else:
                pass
    else:
        #no selected points --> takes all
        for row in rows:
            plane_temp = calculate_plane(row, s2, point_desc.hasZ, processing_extent, cellSize)
            planes[plane_temp > planes] = plane_temp[plane_temp > planes]
    # Cells untouched by any plane become NaN.
    planes[planes == -np.inf] = np.nan
    # Flip to match the raster row order used elsewhere.
    planes = np.flipud(planes)
    arcpy.AddMessage('Planes constaint max={}, min={}, average={}'.format(np.max(planes), np.min(planes), np.mean(planes)))
    return planes
# Choose the results-table name: inside a GDB no extension is needed,
# otherwise a dBASE table.
if saveInGDB:
    summaryTableName = 'SLBL_results'
else:
    summaryTableName = 'SLBL_results.dbf'
summaryTable = os.path.join(ws, summaryTableName)
arcpy.env.workspace = ws
grid_dem_file = arcpy.Raster(grid_dem_file)
#grid_dem_lyr = os.path.basename(grid_dem_file)
#arcpy.MakeRasterLayer_management (grid_dem_file, grid_dem_lyr, "", "", "1")
# Convert the polygon features to a raster mask
# Best-effort extent refresh; ignored if the tool/license is unavailable.
try:
    arcpy.RecalculateFeatureClassExtent_management(mask_file)
except:
    pass
mask_desc = arcpy.Describe(mask_file)
try:
    # Retrieve the selected polygons (works if the input is a layer)
    Set = mask_desc.FIDSet
except:
    #makes a layer first if the input is a file
    if arcpy.GetInstallInfo()['ProductName'] == 'Desktop':
        mask_file = arcpy.mapping.Layer(mask_file)
    else:
        #mask_file = arcpy.mp.Layer(mask_file)
        mask_file = arcpy.MakeFeatureLayer_management(mask_file)
    mask_desc = arcpy.Describe(mask_file)
    # Retrieve the selected polygons
    # NOTE(review): this excerpt ends inside the except block -- the
    # FIDSet retrieval continues beyond this chunk.
# NOTE(review): indentation reconstructed from a collapsed source --
# verify which statements sit inside the xsLine branch.
# Drape the clipped line over the DEM to get a Z-aware profile line.
arcpy.InterpolateShape_3d(dem, clippedName, ZName, sampleDistance, zFactor)
if os.path.basename(fc) == xsLine:  ### this isn't very robust!
    if debug:
        addMsgAndPrint(' **Making line segments of surface mapunit polys**')
    SMUL = xsFDS + '/CS' + token + 'SurfaceMapUnitLines'
    testAndDelete(SMUL)
    # Intersect the profile with map-unit polygons to segment it by unit.
    arcpy.Intersect_analysis([ZName, gdb + '/GeologicMap/MapUnitPolys'], SMUL)
    minY, maxY = transformAndSwap(SMUL, xsFDS + '/trans_SurfaceMapUnitLines', linkFeatures, tanPlunge)
    testAndDelete(xsFDS + '/trans_SurfaceMapUnitLines')
    arcpy.RecalculateFeatureClassExtent_management(SMUL)
if debug:
    addMsgAndPrint(' **Making points where section line crosses contacts**')
    addMsgAndPrint(ZName)
    addMsgAndPrint(' hasZ = ' + str(arcpy.Describe(ZName).hasZ))
SCAF1 = xsFDS + '/CS' + token + 'SurfaceCAF_pts1'
SCAF = xsFDS + '/CS' + token + 'SurfaceCAF_pts'
testAndDelete(SCAF1)
testAndDelete(SCAF)
# Point intersection of the profile with contacts-and-faults.
arcpy.Intersect_analysis([ZName, gdb + '/GeologicMap/ContactsAndFaults'], SCAF1, 'ALL', '#', 'POINT')
print("File is located at " + str(outLocation)) #write irregularities to a file and delete them indata = "xyLineOutput.shp" xyRows1 = arcpy.da.UpdateCursor(indata, ["X", msagID]) xyRows2 = arcpy.da.UpdateCursor(indata, ["X1", msagID]) f = open(str(outLocation) + "/irregularities.csv", "a") for row in xyRows1: if int(row[0]) < 1: f.write(str(row[1])+",\n") xyRows1.deleteRow() for row in xyRows2: if int(row[0]) < 1: f.write(str(row[1])+",\n") xyRows2.deleteRow() #determine the new extent with the irregularities removed arcpy.RecalculateFeatureClassExtent_management(indata) #delete not needed data arcpy.Delete_management("toCountTEMP.csv") arcpy.Delete_management("zipExp.csv") arcpy.Delete_management("msagGDB.dbf") arcpy.Delete_management("msagTableEDIT.dbf") arcpy.Delete_management("msagTableEDIT.csv") print("Code is finished. Exiting program...")
# NOTE(review): this chunk starts mid-script -- `flds`, `fld`, `fds`,
# `sourceFC`, `destFC`, `sr`, `wksp`, `whereSQL` are defined earlier.
flds.remove(fld)
flds.append('SHAPE@')
# Create the destination feature class on first run, otherwise empty it.
if not arcpy.Exists(destFC):
    print 'Creating:', destFC
    arcpy.CreateFeatureclass_management(fds, "AerialCable", "POLYLINE", sourceFC, "SAME_AS_TEMPLATE", "SAME_AS_TEMPLATE", sr)
    arcpy.AlterAliasName(destFC, 'Aerial Cable')
else:
    # NOTE(review): Python 2 parses this as print((...).format(destFC));
    # under Python 3 this exact line would raise AttributeError.
    print('{0} exists, truncating table').format(destFC)
    arcpy.TruncateTable_management(destFC)
# Copy matching rows inside an edit session.
with arcpy.da.Editor(wksp) as edit:
    ic = arcpy.da.InsertCursor(destFC, flds)
    print 'Inserting Aerial Cable...'
    with arcpy.da.SearchCursor(sourceFC, flds, whereSQL) as sc:
        for row in sc:
            ic.insertRow(row)
# NOTE(review): nesting reconstructed from a collapsed source -- the
# extent recalculation is assumed to run after the edit session closes.
arcpy.RecalculateFeatureClassExtent_management(destFC)
getRowCount(destFC, ["*"], whereSQL)
getRowCount(sourceFC, ["*"], whereSQL)
#result = arcpy.GetCount_management(searchTable)
#print('ElectricSearch '),int(result.getOutput(0))
print 'finished'
# list comprehension
# rows = [row for row in arcpy.da.SearchCursor(r'Database Connections\MapEDPR_ArcFM.sde\mapedpr.ARCFM_ED.eFOREIGNATTACHMENT','ATTACHMENTZONE',where_clause="ATTACHMENTZONE = 'SUPPLY'")] # len(rows)
# NOTE(review): this chunk starts mid-script -- `fc`, `ic`,
# `fldsFeederID`, `connectionRoot`, `searchTable`, `elecFldsDest`,
# `fcFeederIDs`, `fldsFeederIDs` and the check* helpers are defined
# earlier.  Indentation reconstructed from a collapsed source.
with arcpy.da.SearchCursor(connectionRoot % (fc), fldsFeederID) as sc:
    for scrow in sc:
        # Combine both circuit fields into one feeder string.
        feeders = checkCkt(scrow[2]) + checkCkt(scrow[3])
        # NOTE(review): `tiepoint` (lowercase) is assigned but never
        # used; `tiePoint` is what gets inserted -- possible leftover.
        tiepoint = None
        tiePoint = isTiePoint(feeders)
        #print tiePoint
        row = ((checkValue(scrow[0]), changeAliasName(fc), checkValue(scrow[1]), feeders, tiePoint, changeAliasName(fc) + '-' + checkValue(scrow[0]), scrow[4]))
        ic.insertRow(row)
del sc
del ic
# Second pass: feature classes with a single feeder-ID field.
for fc in fcFeederIDs:
    ic = arcpy.da.InsertCursor(searchTable, elecFldsDest)
    print 'Inserting...', changeAliasName(fc)
    with arcpy.da.SearchCursor(connectionRoot % (fc), fldsFeederIDs) as sc:
        for scrow in sc:
            row = ((checkValue(scrow[0]), changeAliasName(fc), checkValue(scrow[1]), scrow[2], None, changeAliasName(fc) + '-' + checkValue(scrow[0]), scrow[3]))
            ic.insertRow(row)
    # Release the cursors before the next feature class.
    del sc
    del ic
# Refresh the search table's stored extent after the bulk inserts.
arcpy.RecalculateFeatureClassExtent_management(searchTable)
result = arcpy.GetCount_management(searchTable)
# Python 2 print statement: emits "ElectricSearch  <count>".
print('ElectricSearch '), int(result.getOutput(0))
print 'finished'
import arcpy # fGDB variables OH_Primary = r'C:\arcdata\transfer\MIMS_Electric_Extract.gdb\Electric\ePriOHElectricLineCond' UG_Primary = r'C:\arcdata\transfer\MIMS_Electric_Extract.gdb\Electric\ePriUGElectricLineCond' mimsFDS = r'C:\arcdata\transfer\MIMS_Electric_Extract.gdb\MIMS' backbone = r'C:\arcdata\transfer\MIMS_Electric_Extract.gdb\MIMS\mmBackbone' arcpy.CreateFeatureclass_management(mimsFDS, 'mmBackbone', 'POLYLINE') arcpy.AlterAliasName(backbone, 'Backbone') arcpy.AddField_management(backbone, 'FEEDERID', 'TEXT', field_alias='Circuit Number') arcpy.MakeFeatureLayer_management(OH_Primary, 'OHBackbone', "BACKBONEINDICATOR = 'Y'") arcpy.MakeFeatureLayer_management(UG_Primary, 'UGBackbone', "BACKBONEINDICATOR = 'Y'") arcpy.Append_management(['OHBackbone', 'UGBackbone'], backbone, 'NO_TEST') arcpy.RecalculateFeatureClassExtent_management(backbone) result = arcpy.GetCount_management(backbone) print('Total Backbone '), int(result.getOutput(0)) print 'finished'
    'SYMBOLROTATION', 'STATUS', 'INSTALL_NUM', 'UNIT_TYPE', 'AGREEMENT',
    'STREETADDRESS', 'GRUGISID', 'GLOBALID', 'STRUCTUREID', 'FACILITYID',
    'StockNumber', 'StockNumber_Arm', 'eSupportStructure_GLOBALID',
    'StandardLabel', 'DisplayLabel', 'SHAPE@'
]
# NOTE(review): the list above is the tail of a field list
# (presumably `lightFlds`) whose start is outside this excerpt.


def getRowCount(tbl, flds, whereSQL=None):
    # Return the number of rows in `tbl` matching `whereSQL`; prints the
    # count when more than one row is found.  (Python 2: the line below
    # parses as print(('...') % (...)) and prints the formatted string.)
    rows = [row for row in arcpy.da.SearchCursor(tbl, flds, whereSQL)]
    if len(rows) > 1:
        print('Found %i in %s') % (len(rows), tbl)
    return len(rows)


# Empty the destination before re-filling it.
if arcpy.Exists(ocLights):
    print 'Truncating...', ocLights
    arcpy.TruncateTable_management(ocLights)

# Start Main
# Copy all orange-capped ("OC") lights inside an edit session.
with arcpy.da.Editor(workspace) as edit:
    ic = arcpy.da.InsertCursor(ocLights, lightFlds)
    with arcpy.da.SearchCursor(lights, lightFlds, where_clause="STATUS='OC'") as sc:
        for scrow in sc:
            ic.insertRow(scrow)
# NOTE(review): nesting reconstructed from a collapsed source -- the
# extent recalculation is assumed to run after the edit session closes.
arcpy.RecalculateFeatureClassExtent_management(ocLights)
result = arcpy.GetCount_management(ocLights)
# Python 2 print statement: emits "Orange Cappped Lights  <count>".
print('Orange Cappped Lights '), int(result.getOutput(0))
print 'finished'