def crawlFinal():
    env.workspace = currentPathSettings.FinalPath
    print str(currentPathSettings.FinalPath)
    # list the file geodatabases in the final directory
    workspaces = ListWorkspaces("*", "FileGDB")
    for ws in workspaces:
        print ws
        env.workspace = ws
        # print the tables in the geodatabase
        tablelist = ListTables()
        print "tables"
        for table in tablelist:
            print " " + table
        # print the feature classes stored in feature datasets
        datasets = ListDatasets("*")
        for fd in datasets:
            print fd
            print "feature classes - Polygon"
            FCListPoly = ListFeatureClasses("*", "Polygon", fd)
            for fc in FCListPoly:
                print " " + fc
            print "feature classes - Lines"
            FCListLine = ListFeatureClasses("*", "Polyline", fd)
            for fc in FCListLine:
                print " " + fc
            print "feature classes - points"
            FCListPoint = ListFeatureClasses("*", "Point", fd)
            for fc in FCListPoint:
                print " " + fc
def LoadARegion():
    from arcpy import Append_management, ListFeatureClasses, ListDatasets, env, ListTables
    #importGDB = r"//gisdata/planning/Cart/projects/Conflation/GIS_DATA/GEO_COMM/REGION3_20151002/REGION3_20151002.gdb"
    #importGDB = r"\\gisdata\planning\Cart\projects\Conflation\GIS_DATA\GEO_COMM\REGION4_20151021\REGION4_20151021\REGION4_20151021.gdb"
    importGDB = r"\\gisdata\planning\Cart\projects\Conflation\GIS_DATA\GEO_COMM\REGION5_20151211\REGION5_20151211.gdb"
    LoadTarget = r"\\gisdata\planning\Cart\projects\Conflation\Workflow\conflation_sqlgis_geo.sde\Conflation.GEO."
    env.workspace = importGDB
    ### There are no tables in the conflated dataset products - handle similarly and separately
    skiplist = ['Stitch_Lines', 'RoadCenterlines', 'Overlaps_Gaps_MunicipalBoundary',
                'Overlaps_Gaps_FIRE', 'Overlaps_Gaps_LAW', 'Overlaps_Gaps_PSAP',
                'Overlaps_Gaps_ESZ', 'Overlaps_Gaps_EMS', 'Overlaps_Gaps_CountyBoundary',
                'Overlaps_Gaps_AuthoritativeBoundary']
    tables = ListTables()
    for table in tables:
        print table
        target = LoadTarget + table
        print target
    datasets = ListDatasets("*")
    for fd in datasets:
        print fd
        featureClasses = ListFeatureClasses("*", "All", fd)
        for fc in featureClasses:
            print fc
            if fc in skiplist:
                print 'skipping'
            else:
                target = LoadTarget + fd + "/" + fc
                print "loading to " + target
                Append_management(fc, target, schema_type="NO_TEST")
def Open(cls, path):
    """Open the geodatabase at path and return a Geodatabase wrapper of its contents."""
    # change the arcpy workspace for listing, but save the current setting
    workspace = env.workspace
    env.workspace = path
    cls.validate_geodatabase(path)
    # TODO: Need a generic workspace class, and a dataset class
    datasets = ListDatasets()
    fcs_names = ListFeatureClasses()
    rasters_names = ListRasters()
    tables_names = ListTables()
    # take all the found layers and make them into Layer objects
    fcs = []
    for fc in fcs_names:
        fcs.append(Layer(os.path.join(path, fc)))
    rasters = []
    for raster in rasters_names:
        rasters.append(Layer(os.path.join(path, raster)))
    tables = []
    for table in tables_names:
        tables.append(Layer(os.path.join(path, table)))
    # set the workspace back for the user
    env.workspace = workspace
    return Geodatabase(path, datasets, fcs, rasters, tables)
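# Hypothetical usage of the Open classmethod above, assuming it is declared as a
# @classmethod on the Geodatabase class; the attribute name on the returned object
# is an assumption inferred from the constructor call, not a confirmed API.
gdb = Geodatabase.Open(r"C:\data\example.gdb")  # illustrative path
for fc_layer in gdb.fcs:  # assumed attribute backing the fcs constructor argument
    print fc_layer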
def TrackEditsAndVersion():
    env.workspace = OracleDB
    fclist = ListFeatureClasses()
    for fc in fclist:
        print fc
        if str(fc)[-2:] == 'MV':
            print "no actions taken on a Materialized View"
        else:
            if ListFields(fc, "GlobalID"):
                print "GlobalID field already added"
            else:
                AddField_management(OracleDB + "/" + fc, "GlobalID", "GUID", "#", "#", "#",
                                    "GlobalID", "NON_NULLABLE", "REQUIRED")
                AddField_management(OracleDB + "/" + fc, "START_DATE", "DATE", "#", "#", "#",
                                    "Start_Date", "NULLABLE", "NON_REQUIRED")
                AddField_management(OracleDB + "/" + fc, "END_DATE", "DATE", "#", "#", "#",
                                    "End_Date", "NULLABLE", "NON_REQUIRED")
                EnableEditorTracking_management(OracleDB + "/" + fc,
                                                creator_field="Creator",
                                                creation_date_field="Created",
                                                last_editor_field="Editor",
                                                last_edit_date_field="Edited",
                                                add_fields="ADD_FIELDS",
                                                record_dates_in="UTC")
                RegisterAsVersioned_management(OracleDB + "/" + fc, "NO_EDITS_TO_BASE")
def process_feature_classes(input_ws, output_ws, foreach_layer=None):
    """
    processes each feature class with an optional function
    input_ws - the database or dataset path to process feature classes
    output_ws - the output for the feature classes
    foreach_layer - the function to process the feature classes
    """
    from arcpy import env, ListFeatureClasses, FeatureClassToGeodatabase_conversion, AddWarning, AddMessage
    from os.path import join
    env.workspace = input_ws
    feature_classes = ListFeatureClasses()
    for feature_class in feature_classes:
        AddMessage('Processing {}...'.format(feature_class))
        try:
            if foreach_layer:
                foreach_layer(input_ws, output_ws, feature_class)
            else:
                # copy each feature class over
                output_path = join(output_ws, get_name(feature_class))
                delete_existing(output_path)
                FeatureClassToGeodatabase_conversion(feature_class, output_ws)
        except Exception as e:
            AddWarning('Error processing feature class {} - {}'.format(feature_class, e))
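# Sketch of a caller-supplied foreach_layer callback for the function above.
# The (input_ws, output_ws, feature_class) signature mirrors the call inside
# process_feature_classes; the paths and clip features are illustrative assumptions.
from arcpy import Clip_analysis
from os.path import join

def clip_layer(input_ws, output_ws, feature_class):
    clip_fc = r"C:\data\boundary.gdb\study_area"  # assumed clip geometry
    Clip_analysis(join(input_ws, feature_class), clip_fc,
                  join(output_ws, feature_class))

process_feature_classes(r"C:\data\input.gdb", r"C:\data\output.gdb", clip_layer)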
def ExamineGDB(gdb):
    import ntpath, re
    reviewpath = ntpath.basename(gdb)
    from arcpy import (env, ListWorkspaces, ListDatasets, ListTables,
                       ListFeatureClasses, GetCount_management,
                       Compact_management, ListFields)
    # set the workspace from the config file
    env.workspace = ntpath.dirname(gdb)
    ng911 = gdb
    print "geodatabases"
    print ng911
    env.workspace = ng911
    datasets = ListDatasets()
    print "Datasets:"
    for dataset in datasets:
        print " " + str(dataset)
    tables = ListTables()
    print " tables:"
    for table in tables:
        fcc = GetCount_management(table)
        print " " + str(table)
    fd = datasets[0]
    fcs = ListFeatureClasses("", "", fd)
    for fc in fcs:
        fields = ListFields(fc)
        fcc = GetCount_management(fc)
        print fc + ", " + str(fcc) + " features"
        for field in fields:
            print " " + str(field.name) + ", " + str(field.type)
    checkfile = reviewpath + "/" + ntpath.basename(ng911)
    topo = fd + "/NG911_Topology"
    Compact_management(ng911)
def process_feature_classes(input_ws, output_ws, foreach_layer=None):
    """
    processes each feature class with an optional function
    input_ws - the database or dataset path to process feature classes
    output_ws - the output for the feature classes
    foreach_layer - the function to process the feature classes
    """
    from arcpy import env, ListFeatureClasses, FeatureClassToGeodatabase_conversion, \
        AddWarning, AddMessage, GetCount_management, FeatureClassToFeatureClass_conversion
    from os.path import join
    env.workspace = input_ws
    feature_classes = ListFeatureClasses()
    for feature_class in feature_classes:
        AddMessage('Processing {}...'.format(feature_class))
        # skipEmpty is not a standard arcpy environment setting; it appears to be
        # a custom flag this module expects the caller to set on env beforehand.
        if env.skipEmpty:
            count = int(GetCount_management(feature_class)[0])
            if count == 0:
                AddWarning('Skipping because table is empty: {}'.format(feature_class))
                continue
        try:
            if foreach_layer:
                foreach_layer(input_ws, output_ws, feature_class)
            else:
                # copy each feature class over
                output_path = join(output_ws, get_name(feature_class))
                delete_existing(output_path)
                FeatureClassToFeatureClass_conversion(feature_class, output_ws, get_name(feature_class))
        except Exception as e:
            AddWarning('Error processing feature class {} - {}'.format(feature_class, e))
def AddROPrivs(OpRunOut):
    env.workspace = OpRunOut
    env.overwriteOutput = True
    print OpRunOut
    DissolvedFCList = ListFeatureClasses()
    for FC in DissolvedFCList:
        print FC
        ChangePrivileges_management(FC, "readonly", "GRANT", "AS_IS")
def RemoveGpHistory_fc(sdeconn, remove_gp_history_xslt, out_xml):
    ClearWorkspaceCache_management()
    env.workspace = sdeconn
    for fx in ListFeatureClasses():
        name_xml = out_xml + os.sep + str(fx) + ".xml"
        # Process: XSLT Transformation
        XSLTransform_conversion(sdeconn + os.sep + fx, remove_gp_history_xslt, name_xml, "")
        print "Completed xml conversion on {0}".format(fx)
        # Process: Metadata Importer
        MetadataImporter_conversion(name_xml, sdeconn + os.sep + fx)
        print "Imported XML on {0}".format(fx)
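# Call sketch for RemoveGpHistory_fc above. The XSLT is the "remove geoprocessing
# history" stylesheet that ships with ArcGIS Desktop; the exact install path varies
# by version, so all three paths here are assumptions to adapt locally.
sdeconn = r"C:\connections\gis_prod.sde"
xslt = r"C:\Program Files (x86)\ArcGIS\Desktop10.3\Metadata\Stylesheets\gpTools\remove geoprocessing history.xslt"
out_xml = r"C:\temp\gp_history_xml"
RemoveGpHistory_fc(sdeconn, xslt, out_xml)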
def Analyzer(startworkspace):
    env.workspace = startworkspace
    dataList = ListTables() + ListFeatureClasses()
    for dataset in ListDatasets("*", "Feature"):
        env.workspace = os.path.join(startworkspace, dataset)
        dataList += ListFeatureClasses() + ListDatasets()
    AnalyzeDatasets_management(startworkspace, include_system="NO_SYSTEM",
                               in_datasets=dataList, analyze_base="ANALYZE_BASE",
                               analyze_delta="ANALYZE_DELTA", analyze_archive="ANALYZE_ARCHIVE")
    print "analyzed " + str(dataList)
    env.workspace = SDE
    AnalyzeDatasets_management(SDE, include_system="SYSTEM", in_datasets="",
                               analyze_base="ANALYZE_BASE", analyze_delta="ANALYZE_DELTA",
                               analyze_archive="ANALYZE_ARCHIVE")
    print "analyzed system tables"
    env.workspace = startworkspace
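# Analyzer reads a module-level SDE connection for the system-table pass; a minimal
# call sketch, assuming the admin connection file path (both values are illustrative).
SDE = r"C:\connections\gis_admin.sde"  # assumed admin connection used for system tables
Analyzer(SDE)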
def Restart():
    from arcpy import DeleteRows_management
    targetpath = (currentPathSettings.EntDB + '/' + currentPathSettings.EDBName +
                  '.' + currentPathSettings.EDBO + '.' + currentPathSettings.EFD)
    print targetpath
    env.workspace = targetpath
    fclist = ListFeatureClasses()
    for fc in fclist:
        print fc
        #DeleteRows_management(fc)
    targetpath = currentPathSettings.EntDB
    env.workspace = targetpath
    tablelist = ListTables()
    for table in tablelist:
        print table
        DeleteRows_management(table)
def MigrateFeatureClasses():
    fclist = ListFeatureClasses()
    for fc in fclist:
        if Exists(OracleDB + "/" + fc):
            print str(OracleDB + "/" + fc) + " already exists"
            print
        else:
            print str(fc) + "... exporting to SDO in " + str(OracleDB)
            FeatureClassToFeatureClass_conversion(fc, OracleDB, str(fc),
                                                  where_clause="#",
                                                  config_keyword="SDO_GEOMETRY")
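# MigrateFeatureClasses reads its source from env.workspace and its target from an
# OracleDB module global; an illustrative setup sketch (both paths are assumptions).
from arcpy import env
env.workspace = r"C:\data\source.gdb"        # assumed source workspace
OracleDB = r"C:\connections\oracle_sdo.sde"  # assumed Oracle SDE connection file
MigrateFeatureClasses()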
def LoadFinalStreets(inFile, inFeatureclass, inTable):
    LoadThis = inFeatureclass
    FromThis = inFile
    LoadTable = inTable
    targetpath = (currentPathSettings.EntDB + '/' + currentPathSettings.EDBName +
                  '.' + currentPathSettings.EDBO)
    env.workspace = currentPathSettings.FinalPath
    print str(currentPathSettings.FinalPath)
    # list the file geodatabases in the final directory
    workspaces = ListWorkspaces(FromThis, "FileGDB")
    for ws in workspaces:
        print ws
        env.workspace = ws
        # print the tables in the geodatabase
        tablelist = ListTables(LoadTable)
        print "tables"
        for table in tablelist:
            print " " + table
            print "loading " + ws + "/" + table + " to " + targetpath + table
            #CalledUpon(ws)
            # Try to load/append the rows in the alias table to the aggregated geodatabase
            try:
                Append_management(ws + "/" + table, targetpath + "." + table, "NO_TEST", "#")
            except:
                print 'there was a problem loading the alias table for ' + ws
        # print the feature classes stored in feature datasets
        datasets = ListDatasets("*")
        for fd in datasets:
            #print fd
            #print "feature classes - Polygon"
            #FCListPoly = ListFeatureClasses("*", "Polygon", fd)
            #for fc in FCListPoly:
            #    print " " + fc
            #print "feature classes - Lines"
            FCListLine = ListFeatureClasses(LoadThis, "Polyline", fd)
            for fc in FCListLine:
                #print " " + fc
                print ("loading " + ws + "/" + fc + " to " + targetpath + '.' +
                       currentPathSettings.EFD + "." + fc)
                try:
                    Append_management(fc,
                                      targetpath + '.' + currentPathSettings.EFD + "/" + fc,
                                      "NO_TEST", "#")
                except:
                    print 'there was a problem loading centerlines for ' + ws
def SetProjection():
    env.workspace = OracleDB
    fclist = ListFeatureClasses()
    for fc in fclist:
        if str(fc)[-2:] == 'MV':
            print "no actions taken on a Materialized View"
        else:
            spatial_ref = Describe(fc).spatialReference
            # If the spatial reference is unknown
            if spatial_ref.name == "Unknown":
                print("{0} has an unknown spatial reference".format(fc))
                DefineProjection_management(OracleDB + "/" + fc, coor_system)
            # Otherwise, print out the feature class name and spatial reference
            else:
                print("{0} : {1}".format(fc, spatial_ref.name))
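# SetProjection depends on module-level OracleDB and coor_system globals; a sketch
# of how they might be defined (the connection path and WKID are assumptions).
from arcpy import SpatialReference
OracleDB = r"C:\connections\oracle_sdo.sde"  # assumed SDE connection file
coor_system = SpatialReference(4326)         # assumed target: WGS 1984 (WKID 4326)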
def __init__(self, usar_data):
    self.log = log()
    self.dashboard_db = config["dashboard_database"]
    self.hqiis = self.dashboard_db + os.sep + config["hqiis"]
    self.current_use_catcode_table = self.dashboard_db + os.sep + config["catcodes"]
    self.sites = self.dashboard_db + os.sep + config["sites"]
    self.usar_data = usar_data
    env.workspace = self.usar_data
    self.feature_classes = set()
    feature_types = config["feature_types"].split(", ")
    self.layer_status = LayerStatus(self.dashboard_db + os.sep + config["CIP_Layer_Status"])
    self.layer_status.baseline_the_table(self.__get_insts_sites(), feature_types)
    for ds in ListDatasets():
        for fc in ListFeatureClasses(feature_dataset=ds):
            self.feature_classes.add(fc)
    read_cache = File("CheckForNeeds")
    self.previous_rpuids = read_cache.read()
def checker(self):
    # type: () -> bool
    try:
        env.workspace = self.in_db
        for dataset in ListDatasets():
            for fc in ListFeatureClasses(feature_dataset=dataset):
                self.__fc = fc
                self.__fc_fields = ListFields(self.__fc)
                for installation in self.__get_installations(fc):
                    if installation:
                        self.__installation_field_check(installation)
    except Exception as e:
        self.log.exception(e.message)
        raise Exit()
    else:
        self.__write_result_to_table(self.__attributes)
        return True
def RemoveGpHistory_fc(remove_gp_history_xslt, out_xml, OpEnvironment):
    env.workspace = OpEnvironment.OpRunOut
    sdeconn = OpEnvironment.OpRunOut
    env.overwriteOutput = True
    print sdeconn
    ClearWorkspaceCache_management()
    for fx in ListFeatureClasses():
        try:
            name_xml = out_xml + os.sep + str(fx) + ".xml"
            # Process: XSLT Transformation
            XSLTransform_conversion(sdeconn + os.sep + fx, remove_gp_history_xslt, name_xml, "")
            print "Completed xml conversion on {0}".format(fx)
            # Process: Metadata Importer
            MetadataImporter_conversion(name_xml, sdeconn + os.sep + fx)
            print "Imported XML on {0}".format(fx)
        except:
            print "could not complete xml conversion on {0}".format(fx)
# Start script
start_time = time.time()

# Get the parameters of the Project2 tool
input_folder = GetParameterAsText(0)
input_file = GetParameterAsText(1)
# input_folder = "C:\\learnPython\\data\\lesson2"
# input_file = "C:\\learnPython\\data\\lesson2\\StateRoutes.shp"

print("Executing: Project2 tool " + str(input_folder) + " " + str(input_file))
print("Start Time: " + strftime("%a, %d %b %Y %H:%M:%S", gmtime()))
print("Running script Project2-Batch-reprojection-tool-for-vector-datasets")

# Get a list of all feature classes in the standard folder
env.workspace = input_folder
featureClasses = ListFeatureClasses()

# Get the spatial reference of the standard file
desc_standard_file = Describe(input_file)
sr_standard_file = desc_standard_file.spatialReference

# list to keep the reprojected file names
l = []
try:
    # Loop through all feature classes in the standard folder
    for feature in featureClasses:
        # Describe the feature's spatial reference
        desc = Describe(feature)
        sr = desc.spatialReference
def iteratorprocess():
    env.workspace = extractDataGDBPath
    accDataFeaturesList = ListFeatureClasses("CrashLocation.GEO.ACC*")
    # Use the FullTable for the overall total.
    accDataFullTable = os.path.join(extractDataGDBPath, r'CrashLocation.GEO.GIS_GEOCODE_ACC')
    withRoadsTotal = 0
    gcKTotal = 0
    gcNKTotal = 0
    ofsKTotal = 0
    ofsNKTotal = 0
    NG911CoAccidents = 0
    inMemoryTempLayer = 'inMemoryTempFC'

    for countyItem in coAbbrAndNoList:
        countyNumber = countyItem[1]
        countyName = countyItem[2]
        accDataPointsKDOT = "CrashLocation.GEO.ACC_PTS_" + countyNumber
        accDataPointsNK = "CrashLocation.GEO.ACC_PTS_" + countyNumber + "_NK"
        accDataOffsetKDOT = "CrashLocation.GEO.ACC_OFS_PTS_" + countyNumber
        accDataOffsetNK = "CrashLocation.GEO.ACC_OFS_PTS_" + countyNumber + "_NK"

        # Re-zero the loop variables here so that the table doesn't get
        # incorrect information in it.
        totalAccidents = 0
        geocodedAccidents = 0
        geocodedAccidentsNK = 0
        offsetAccidents = 0
        offsetAccidentsNK = 0
        gcPercent = '0.00'
        gcNKPercent = '0.00'
        ofsPercent = '0.00'
        ofsNKPercent = '0.00'

        if (accDataPointsKDOT in accDataFeaturesList) or (accDataPointsNK in accDataFeaturesList) or \
           (accDataOffsetKDOT in accDataFeaturesList) or (accDataOffsetNK in accDataFeaturesList):

            if accDataPointsKDOT in accDataFeaturesList:
                try:
                    Delete_management(inMemoryTempLayer)
                except:
                    pass
                accDataPointsKDOTPath = os.path.join(extractDataGDBPath, accDataPointsKDOT)
                MakeFeatureLayer_management(accDataPointsKDOTPath, inMemoryTempLayer)
                #SelectLayerByAttribute_management(inMemoryTempLayer, 'CLEAR_SELECTION')
                tempResult = GetCount_management(inMemoryTempLayer)
                totalAccidents = int(tempResult.getOutput(0))
                if totalAccidents > 0:
                    withRoadsTotal += totalAccidents
                selectWhereClause = """ Status <> 'U' """
                SelectLayerByAttribute_management(inMemoryTempLayer, 'NEW_SELECTION', selectWhereClause)
                tempResult = GetCount_management(inMemoryTempLayer)
                geocodedAccidents = int(tempResult.getOutput(0))

            if accDataPointsNK in accDataFeaturesList:
                try:
                    Delete_management(inMemoryTempLayer)
                except:
                    pass
                accDataPointsNKPath = os.path.join(extractDataGDBPath, accDataPointsNK)
                MakeFeatureLayer_management(accDataPointsNKPath, inMemoryTempLayer)
                selectWhereClause = """ Status <> 'U' """
                SelectLayerByAttribute_management(inMemoryTempLayer, 'NEW_SELECTION', selectWhereClause)
                tempResult = GetCount_management(inMemoryTempLayer)
                geocodedAccidentsNK = int(tempResult.getOutput(0))

            if accDataOffsetKDOT in accDataFeaturesList:
                try:
                    Delete_management(inMemoryTempLayer)
                except:
                    pass
                accDataOffsetKDOTPath = os.path.join(extractDataGDBPath, accDataOffsetKDOT)
                MakeFeatureLayer_management(accDataOffsetKDOTPath, inMemoryTempLayer)
                selectWhereClause = """ isOffset IS NOT NULL """
                SelectLayerByAttribute_management(inMemoryTempLayer, 'NEW_SELECTION', selectWhereClause)
                tempResult = GetCount_management(inMemoryTempLayer)
                offsetAccidents = int(tempResult.getOutput(0))

            if accDataOffsetNK in accDataFeaturesList:
                try:
                    Delete_management(inMemoryTempLayer)
                except:
                    pass
                accDataOffsetNKPath = os.path.join(extractDataGDBPath, accDataOffsetNK)
                MakeFeatureLayer_management(accDataOffsetNKPath, inMemoryTempLayer)
                selectWhereClause = """ isOffset IS NOT NULL """
                SelectLayerByAttribute_management(inMemoryTempLayer, 'NEW_SELECTION', selectWhereClause)
                tempResult = GetCount_management(inMemoryTempLayer)
                offsetAccidentsNK = int(tempResult.getOutput(0))

            try:
                gcPercent = "{0:.2f}".format((float(geocodedAccidents) / totalAccidents) * 100)
                gcNKPercent = "{0:.2f}".format((float(geocodedAccidentsNK) / totalAccidents) * 100)
                ofsPercent = "{0:.2f}".format((float(offsetAccidents) / totalAccidents) * 100)
                ofsNKPercent = "{0:.2f}".format((float(offsetAccidentsNK) / totalAccidents) * 100)
            except ZeroDivisionError:
                gcPercent = None
                gcNKPercent = None
                ofsPercent = None
                ofsNKPercent = None
            except:
                pass

            gcKTotal += geocodedAccidents
            gcNKTotal += geocodedAccidentsNK
            ofsKTotal += offsetAccidents
            ofsNKTotal += offsetAccidentsNK
            NG911CoAccidents += totalAccidents

            print("\n" + countyName + " County has " + str(totalAccidents) + " totalAccidents.")
            # str() guards against the None values assigned on ZeroDivisionError.
            print("gcPercent: " + str(gcPercent) + " gcNKPercent: " + str(gcNKPercent) +
                  " ofsPercent: " + str(ofsPercent) + " ofsNKPercent: " + str(ofsNKPercent))
            # To get the withRoadsTotal, sum the number for each county that
            # returned a non-zero result for totalAccidents.

        reportResult = [countyName, totalAccidents, gcPercent, gcNKPercent, ofsPercent, ofsNKPercent]
        reportResultsList.append(reportResult)

    try:
        Delete_management(inMemoryTempLayer)
    except:
        pass

    MakeTableView_management(accDataFullTable, inMemoryTempLayer)
    tempResult = GetCount_management(inMemoryTempLayer)
    overallTotal = int(tempResult.getOutput(0))

    for reportResultItem in reportResultsList:
        print str(reportResultItem[0])

    gcNG911Percent = "{0:.2f}".format((float(gcKTotal) / NG911CoAccidents) * 100)
    gcNKNG911Percent = "{0:.2f}".format((float(gcNKTotal) / NG911CoAccidents) * 100)
    ofsNG911Percent = "{0:.2f}".format((float(ofsKTotal) / NG911CoAccidents) * 100)
    ofsNKNG911Percent = "{0:.2f}".format((float(ofsNKTotal) / NG911CoAccidents) * 100)

    print "\n" + "The NG911Total is: " + str(NG911CoAccidents)
    print(" with gcPercent: " + gcNG911Percent + " gcNKPercent: " + gcNKNG911Percent +
          " ofsPercent: " + ofsNG911Percent + " ofsNKPercent: " + ofsNKNG911Percent)
    reportResult = ["NG911Total", NG911CoAccidents, gcNG911Percent, gcNKNG911Percent,
                    ofsNG911Percent, ofsNKNG911Percent]
    reportResultsList.append(reportResult)

    gcOverallPercent = "{0:.2f}".format((float(gcKTotal) / overallTotal) * 100)
    gcNKOverallPercent = "{0:.2f}".format((float(gcNKTotal) / overallTotal) * 100)
    ofsOverallPercent = "{0:.2f}".format((float(ofsKTotal) / overallTotal) * 100)
    ofsNKOverallPercent = "{0:.2f}".format((float(ofsNKTotal) / overallTotal) * 100)

    print "\n" + "The OverallTotal is: " + str(overallTotal)
    print(" with gcPercent: " + gcOverallPercent + " gcNKPercent: " + gcNKOverallPercent +
          " ofsPercent: " + ofsOverallPercent + " ofsNKPercent: " + ofsNKOverallPercent)
    reportResult = ["OverallTotal", overallTotal, gcOverallPercent, gcNKOverallPercent,
                    ofsOverallPercent, ofsNKOverallPercent]
    reportResultsList.append(reportResult)

    resultsTablePath = recreateResultsTable()

    # Delete the previous table information, if any, then create an insert cursor
    # and place all of the report result items in the table.
    newICursor = InsertCursor(resultsTablePath, insertCursorFields)
    for reportResultItem in reportResultsList:
        insertedRowID = newICursor.insertRow(reportResultItem)
        print "Inserted a new row into the REPORT_INFO table with OID: " + str(insertedRowID)
def iteratorprocess():
    roadChecksPath = r'\\gisdata\ArcGIS\GISdata\Accident Geocode\Python\RoadChecks'
    roadChecksFileList = os.listdir(roadChecksPath)
    env.workspace = extractDataGDBPath
    accDataPointsList = ListFeatureClasses("CrashLocation.GEO.ACC_PTS_*")
    useKDOTIntersect = True

    for gdbName in roadChecksFileList:
        if (len(gdbName) > 23 and gdbName[0] != "x" and gdbName[-15] == "_" and
                gdbName[-15:].lower() == "_roadchecks.gdb"):
            countyAbbr = gdbName[-23] + gdbName[-22]
            roadChecksGDBPath = os.path.join(roadChecksPath, gdbName)
            roadsFeatures = os.path.join(roadChecksGDBPath, r'NG911\RoadCenterline')
            roadsAlias = os.path.join(roadChecksGDBPath, r'RoadAlias')
            # If the translation of the roadChecks gdb's county abbr to countyNo
            # exists as geocoded point features in the crashLocation sql
            # instance with either KDOT fields or non-KDOT fields, then
            # call the offsetdirectioncaller function with the appropriate
            # parameters for that roadChecks gdb and geocoded point features.
            #try:
            for countyItem in coAbbrAndNoList:
                if countyAbbr.lower() == countyItem[0].lower():
                    countyNumber = countyItem[1]

                    accDataPointsKDOT = "CrashLocation.GEO.ACC_PTS_" + countyNumber
                    if accDataPointsKDOT in accDataPointsList:
                        useKDOTIntersect = True
                        offsetFeaturesNameKDOT = "CrashLocation.GEO.ACC_OFS_PTS_" + countyNumber
                        print ("Calling offsetdirectioncaller function for " + roadChecksGDBPath +
                               "\n" + ", " + accDataPointsKDOT + ", and " +
                               offsetFeaturesNameKDOT + ".")
                        accDataPathKDOT = os.path.join(extractDataGDBPath, accDataPointsKDOT)
                        offsetFeaturesPathKDOT = os.path.join(extractDataGDBPath, offsetFeaturesNameKDOT)
                        offsetdirectioncaller(accDataPathKDOT, roadsAlias, roadsFeatures,
                                              offsetFeaturesPathKDOT, useKDOTIntersect)

                    accDataPointsNK = "CrashLocation.GEO.ACC_PTS_" + countyNumber + "_NK"
                    if accDataPointsNK in accDataPointsList:
                        useKDOTIntersect = False
                        offsetFeaturesNameNK = "CrashLocation.GEO.ACC_OFS_PTS_" + countyNumber + "_NK"
                        print ("Calling offsetdirectioncaller function for " + roadChecksGDBPath +
                               "\n" + ", " + accDataPointsNK + ", and " +
                               offsetFeaturesNameNK + ".")
                        accDataPathNK = os.path.join(extractDataGDBPath, accDataPointsNK)
                        offsetFeaturesPathNK = os.path.join(extractDataGDBPath, offsetFeaturesNameNK)
                        offsetdirectioncaller(accDataPathNK, roadsAlias, roadsFeatures,
                                              offsetFeaturesPathNK, useKDOTIntersect)
            #except:
            #    print "An error occurred."
            #    print ""
def DissolveNonDirectionalItems(OpEnvironmentMode):
    OpRunIn = OpEnvironment.OpRunInSum  # @UndefinedVariable
    OpRunOut = OpEnvironment.OpRunOut  # @UndefinedVariable
    adm = OpEnvironment.adm  # @UndefinedVariable
    Owner = OpEnvironment.Owner  # @UndefinedVariable
    DB = OpEnvironment.DB  # @UndefinedVariable
    env.workspace = OpRunIn
    env.overwriteOutput = True
    print OpRunIn

    # combine the connection, db, and owner into the destination path for
    # enterprise geodatabase output
    OpRunFullOut = OpRunOut + "/" + DB + "." + Owner + "."
    print OpRunFullOut

    FCList = ListFeatureClasses()
    print "dissolving items in the primary direction"
    FCGlobalFieldsDissolve = ["LRS_KEY", "COUNTY_CD", "COUNTY_NAME", "DISTRICT"]
    FCGlobalFieldsSummarize = "BSMP MIN;ESMP MAX;BCMP MIN;ECMP MAX"
    FCFieldsIgnore = ["OBJECTID", "CRND_RTE", "LANE_DIRECTION", "DIV_UNDIV", "SHAPE",
                      "SHAPE.STLength()", "BSMP", "ESMP", "BCMP", "ECMP",
                      "OLD_FUN_CLASS", "FUN_DT"]

    for Item in FCList:
        ItemOut = Item[2:]
        ItemDissolveFields = []
        print ItemOut
        fields = ListFields(Item)
        for field in fields:
            if field.name not in FCFieldsIgnore:
                #print " " + field.name
                ItemDissolveFields.append(field.name)
        dissolvelist = ItemDissolveFields + FCGlobalFieldsDissolve
        DissolveFields = ';'.join(dissolvelist)

        if Exists(OpRunFullOut + ItemOut):
            try:
                print "feature class " + str(ItemOut) + " exists and will be updated"
                Dissolve_management(Item, "in_memory/" + ItemOut, DissolveFields,
                                    FCGlobalFieldsSummarize, "MULTI_PART", "DISSOLVE_LINES")
                TruncateTable_management(OpRunFullOut + ItemOut)
                Append_management("in_memory/" + ItemOut, OpRunFullOut + ItemOut, "NO_TEST", "#")
                Delete_management("in_memory/" + ItemOut)
                print "feature class " + str(ItemOut) + " was successfully updated"
            except ExecuteError:
                print "update failed because the schema has changed from what existed"
                # need to add locking
                DisconnectUser(adm, "readonly")
                AcceptConnections(adm, True)
                Delete_management(OpRunFullOut + ItemOut)
                print "recreating the dissolved feature class for " + str(ItemOut)
                Dissolve_management(Item, OpRunFullOut + ItemOut, DissolveFields,
                                    FCGlobalFieldsSummarize, "MULTI_PART", "DISSOLVE_LINES")
                ChangePrivileges_management(OpRunFullOut + ItemOut, "readonly", "GRANT", "AS_IS")
            except:
                print "another error happened on updating the feature class"
        else:
            print "feature class " + str(ItemOut) + " will be created or overwritten"
            DisconnectUser(adm, "readonly")
            AcceptConnections(adm, True)
            Dissolve_management(Item, OpRunFullOut + ItemOut, DissolveFields,
                                FCGlobalFieldsSummarize, "MULTI_PART", "DISSOLVE_LINES")
            ChangePrivileges_management(OpRunFullOut + ItemOut, "readonly", "GRANT", "AS_IS")

        try:
            Delete_management("in_memory/" + Item)
        except:
            pass
# FUNCTIONALITY
printAndLog(strPSAForLogging_ScriptInitiated, strInfo)

# Is the gdb path valid?
if not path.exists(strRootGeodatabasePath):
    printAndLog("Path {} does not exist.\n".format(strRootGeodatabasePath), strError)
    exit()
else:
    # Set the workspace for ESRI tools
    env.workspace = strRootGeodatabasePath
    # Make a list of the feature classes and check the projection (spatial reference)
    lsFeatureClassesInMasterGDB = sorted(
        ListFeatureClasses(wild_card=None, feature_type="Point", feature_dataset=None))
    for fc in lsFeatureClassesInMasterGDB:
        boolExpectedFactoryCode = False
        # Get the spatial reference and attributes
        try:
            spatrefProjectionName = Describe(fc).spatialReference
            intFactoryCode = spatrefProjectionName.factoryCode
            strPCSName = spatrefProjectionName.PCSName
            if (intFactoryCode == dictExpectedWKID_FactoryCode[fc][0]) and \
                    (strPCSName == dictExpectedWKID_FactoryCode[fc][1]):
                boolExpectedFactoryCode = True
            else:
                boolExpectedFactoryCode = False
        except: