def ReplaceEsriDomains(Tables):
    """Re-upload database tables as domains to their ESRI Enterprise databases.

    Each table in ``Tables`` is matched by name against the domains that
    already exist in the CEO and MAC enterprise databases; a matching domain
    is REPLACEd with the table's newly ordered values.  Tables whose domain
    does not already exist in the enterprise database are skipped.  Finally,
    every table is pushed to the PUB_DOMAIN database unconditionally.

    :param Tables: iterable of table names/paths whose names encode the
        target database ('CEO'/'TS' -> CEO, 'MC' -> MAC) and the domain name.
    """
    ExistingCeoDomains = arcpy.da.ListDomains(
        r"Database Connections\IAMUW-FS_CEO.sde")
    ExistingMcDomains = arcpy.da.ListDomains(
        r"Database Connections\IAMUW-FS_MAC.sde")
    for Table in Tables:
        # BUG FIX: the original test was `'CEO' or 'TS' in Table`, which is
        # always truthy ('CEO' is a non-empty string); test each substring
        # against the table name explicitly.
        if 'CEO' in Table or 'TS' in Table:
            for Domain in ExistingCeoDomains:
                if Domain.name in Table:
                    arcpy.TableToDomain_management(
                        Table, 'Code', 'Description',
                        r"Database Connections\IAMUW-FS_CEO.sde", Table, '',
                        'REPLACE')
        if 'MC' in Table:
            for Domain in ExistingMcDomains:
                if Domain.name in Table:
                    arcpy.TableToDomain_management(
                        Table, 'Code', 'Description',
                        r"Database Connections\IAMUW-FS_MAC.sde", Table, '',
                        'REPLACE')
    # Every table also refreshes its domain in the public domain database.
    for Table in Tables:
        arcpy.TableToDomain_management(Table, 'Code', 'Description',
                                       r"Database Connections\PUB_DOMAIN.sde",
                                       Table, '', 'REPLACE')
def updateAssignmentDomain():
    """Rebuild the Assignment_Number coded-value domain from the Assignments table.

    Reads the module-level workspace ``wrkspc``.  When the Assignments table
    has rows, its Assignment_Number values REPLACE the Assignment_Number
    domain, which is then sorted by description; otherwise a "nothing to
    update" message is emitted.  Any failure is reported as a message rather
    than raised.
    """
    # Process: Table To Domain (10)
    Assignments = "Assignments"
    try:
        cAssign = arcpy.GetCount_management(Assignments)
        if int(cAssign.getOutput(0)) > 0:
            arcpy.AddMessage("\nUpdate Assignment Numbers domain\n")
            arcpy.TableToDomain_management(Assignments, "Assignment_Number",
                                           "Assignment_Number", wrkspc,
                                           "Assignment_Number",
                                           "Assignment_Number", "REPLACE")
            try:
                arcpy.SortCodedValueDomain_management(
                    wrkspc, "Assignment_Number", "DESCRIPTION", "ASCENDING")
            except Exception:
                # FIX: was a bare `except:`. Sorting is best-effort; the
                # domain itself is already updated at this point.
                pass
        else:
            arcpy.AddMessage("No Assignment Numbers to update")
    except Exception:
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt.
        arcpy.AddMessage(
            "Error in Assignment Numbers update: may be two Assignments with same number or multiple blanks"
        )
    return
def updateDomains(domainsFolder, targetGDB):
    """Refresh every coded and range domain in ``targetGDB`` from CSV files.

    ``Coded_Domain_*.csv`` files are loaded with TableToDomain (REPLACE);
    ``Range_Domain_*.csv`` files supply the field type, minimum and maximum
    (second CSV row) for a freshly created RANGE domain.  Errors are
    reported via AddError; entry/exit is always logged.
    """
    try:
        arcpy.AddMessage("Starting: UpdateDomains")

        # Coded (list) domains: one CSV per domain with Value/Name columns.
        codedPattern = os.path.normpath(
            os.path.join(domainsFolder, "Coded_Domain_*.csv"))
        for csvPath in glob.glob(codedPattern):
            # Domain name and description are encoded in the file name.
            name, description = findNameAndDescription(csvPath,
                                                       "Coded_Domain_")
            arcpy.AddMessage("Updating domain " + name + "...")
            arcpy.TableToDomain_management(csvPath, "Value", "Name",
                                           targetGDB, name, description,
                                           "REPLACE")

        # Range domains: the second CSV row holds the type, min and max.
        rangePattern = os.path.normpath(
            os.path.join(domainsFolder, "Range_Domain_*.csv"))
        for csvPath in glob.glob(rangePattern):
            name, description = findNameAndDescription(csvPath,
                                                       "Range_Domain_")
            arcpy.AddMessage("Updating domain " + name + "...")
            with open(csvPath, 'r') as handle:
                rows = csv.reader(handle, dialect='excel')
                next(rows)  # discard the header row
                values = next(rows)
            arcpy.CreateDomain_management(targetGDB, name, description,
                                          Utility.fieldTypeLookup(values[0]),
                                          "RANGE")
            arcpy.SetValueForRangeDomain_management(targetGDB, name,
                                                    int(values[1]),
                                                    int(values[2]))
    except Exception as err:
        arcpy.AddError(
            traceback.format_exception_only(type(err), err)[0].rstrip())
    else:
        arcpy.AddMessage("Success! - Completed: UpdateDomains")
    finally:
        arcpy.AddMessage("Exiting: UpdateDomains")
def CopyDomainsBetweenTables(inputTable, outputTable):
    """Copy the coded-value domains used by ``inputTable`` into
    ``outputTable``'s geodatabase and assign them to the matching fields of
    ``outputTable``.

    Domains already present in the output workspace are reused, not
    recreated.  Only output fields that share a name with a domain-bearing
    input field and have no domain of their own receive an assignment.  The
    output table path is echoed back as script parameter 2.
    """
    inGDB = os.path.split(inputTable)[0]
    outGDB = os.path.split(outputTable)[0]
    arcpy.env.overwriteOutput = True
    dscOutWorkspace = arcpy.Describe(outGDB)
    domainList = []
    domainDict = dict()

    # Collect (and, when missing, migrate) the domains used by the input.
    fieldList = arcpy.ListFields(inputTable)
    for field in fieldList:
        if field.domain:
            print("{0} has domain {1}".format(field.name, field.domain))
            if not (field.domain in dscOutWorkspace.domains):
                # Round-trip the domain through an in-memory table.
                arcpy.DomainToTable_management(inGDB, field.domain,
                                               "IN_MEMORY/DomainTab", "Code",
                                               "Description")
                arcpy.TableToDomain_management("IN_MEMORY/DomainTab", "Code",
                                               "Description", outGDB,
                                               field.domain)
                arcpy.AddMessage("Added " + field.domain + " to " + outGDB)
            domainDict[field.name] = field.domain
            domainList += [field.domain]

    # Assign the collected domains to same-named, domain-less output fields.
    fieldList = arcpy.ListFields(outputTable)
    for field in fieldList:
        # FIX: dict.has_key() is Python-2-only; `in` works on both versions.
        if not field.domain and field.name in domainDict:
            arcpy.AssignDomainToField_management(outputTable, field.name,
                                                 domainDict[field.name])
            # FIX: the original message concatenated field.domain, which is
            # empty in this branch (that is why a domain is being assigned);
            # report the domain that was actually assigned instead.
            arcpy.AddMessage("Assigned " + domainDict[field.name] + " to " +
                             outputTable)
    arcpy.SetParameterAsText(2, outputTable)
def crearDominios(archExcel=""): #------------------------------ if eco: imprimir("Procesando DOMINIOS") if archExcel == "": arcExcel = archivoE if not os.path.exists(archExcel): return dominios = ['ESTADO', 'CAUSAL', 'EQUIPO'] listaD = arcpy.da.ListDomains(arcpy.env.workspace) ll = [] for dn in listaD: ll.append(dn.name) for dm in dominios: x = "SI_" + dm if eco: imprimir("DOMINIO =" + x) if x in ll: pass else: try: arcpy.ExcelToTable_conversion(archExcel, "T_" + x, x) arcpy.TableToDomain_management("T_" + x, "CODIGO", "DESCRIPCION", arcpy.env.workspace, x) except: imprimir("===> PROBLEMA PARA PROCESAR DOMINIO:" + x) pass
def updtDomain(domTable, codeField, descField, wrkspc, domName, domDesc,
               updateOp, TbleText):
    """Push the rows of ``domTable`` into the coded-value domain ``domName``.

    The table's ``codeField`` is first overwritten with a GlobalID taken
    from the table, then TableToDomain is run with ``updateOp`` (e.g.
    REPLACE/APPEND) and the domain is sorted by description.  ``TbleText``
    is only used in progress/warning messages.  Does nothing beyond a
    message when the table is empty.
    """
    # Count the rows in the table to make sure there is something to update
    cSub = arcpy.GetCount_management(domTable)
    if int(cSub.getOutput(0)) > 0:
        # Update the attribute table with the GlobalID before updating the domain
        # NOTE(review): this loop keeps only the GlobalID of the LAST row,
        # and the update below writes that single value into codeField of
        # EVERY row. That is only correct for single-row tables -- confirm
        # against callers.
        with arcpy.da.SearchCursor(domTable, ['GlobalID']) as cursor:
            for row in cursor:
                RowID = str(row[0])
            del row
            del cursor
        cursor = arcpy.da.UpdateCursor(domTable, [codeField])
        for row in cursor:
            row[0] = RowID
            cursor.updateRow(row)
        del cursor, row
        # Process: Create a domain from an existing table
        msg = "Update {} domain".format(TbleText)
        arcpy.AddMessage(msg)
        try:
            arcpy.TableToDomain_management(domTable, codeField, descField,
                                           wrkspc, domName, domDesc,
                                           updateOp)
            arcpy.SortCodedValueDomain_management(wrkspc, domName,
                                                  "DESCRIPTION", "ASCENDING")
        except:
            # Best-effort: a failed update only produces a warning.
            msg = "Unable to update {} domain".format(TbleText)
            arcpy.AddWarning(msg)
            pass
    else:
        msg = "No {} to update".format(TbleText)
        arcpy.AddMessage(msg)
def recreate_domain(workspace, csv_path, code_field, desc_field, domain_name,
                    old_domain_obj):
    """Rebuild ``domain_name`` in ``workspace`` from a CSV file, carrying
    over the description of the pre-existing domain object.

    The domain's coded values are REPLACEd wholesale with the CSV contents.
    """
    # Preserve the human-readable description from the old domain.
    previous_description = old_domain_obj.description
    arcpy.TableToDomain_management(csv_path, code_field, desc_field,
                                   workspace, domain_name,
                                   previous_description, 'REPLACE')
def generate_domains(gdb, domain_dict, xlsx):
    """Create/replace coded-value domains in ``gdb`` from a pre-formatted
    Excel workbook.

    Each key of ``domain_dict`` names both a worksheet (``<dom>$``) inside
    ``xlsx`` and the target domain; the mapped value is the domain
    description.  Worksheets are expected to carry Code and Name columns.
    Failures are printed per domain and do not stop the loop.
    """
    # Pull domain values from pre-formatted xlsx
    # FIX: dict.viewkeys() is Python-2-only; iterating the dict directly is
    # equivalent and works on both versions.
    for dom in domain_dict:
        try:
            domTable = os.path.join(xlsx, dom + "$")
            codeField = "Code"
            descField = "Name"
            domDesc = domain_dict.get(dom)
            # Process: Create a domain from an existing table
            arcpy.TableToDomain_management(domTable, codeField, descField,
                                           gdb, dom, domDesc, "REPLACE")
        except Exception as e:
            # FIX: `except Exception, e` is Python-2-only syntax and
            # e.message is deprecated; str(e) is portable.
            print(str(e))
def AddSubtypeDomains(feature_list, workspace, Parameter_Values):
    """Apply the subtypes listed in the Parameter_Values table as a coded
    domain on the Subtype field of each anthro feature class.

    :param feature_list: a list of anthro features
    :param workspace: the project's unique gdb
    :param Parameter_Values: the Parameter_Values table
    :return: None
    """
    # Rebuild the Subtype domain from the table, then attach it to the
    # Subtype field of every feature class in the list.
    arcpy.TableToDomain_management(Parameter_Values, "Subtype", "Subtype",
                                   workspace, "Subtype",
                                   "Valid anthropogenic subtypes", "REPLACE")
    for anthro_feature in feature_list:
        arcpy.AssignDomainToField_management(anthro_feature, "Subtype",
                                             "Subtype")
def create_table_EVA_Vitebsk(self):
    """Build the street-name (EVA) domain for the city of Vitebsk.

    This special case exists only for Vitebsk, because the generic routine
    did not work correctly for it.  Queries the Oracle address registry for
    the current street-level address elements of ATE object 9387 (Vitebsk),
    caches the rows in ``self.table_EVA_Vitebsk``, loads them into a
    temporary table, APPENDs that table into the "Витебск_EVA" domain of
    ``self.nameDataBase``, and registers subtype 9387 on
    ``self.nameStreets``.

    NOTE(review): the .encode('utf-8') calls on fetched values imply this
    runs under Python 2 with unicode rows -- confirm before porting.
    """
    # Connect to the Oracle address registry through ODBC.
    conn = pyodbc.connect(
        "DRIVER={Oracle in OraClient10g_home1};DBQ=NCABASE:1521/WIN.MINSK.NCA;UID="
        + self.login_to_DB + ";PWD=" + self.password_to_DB)
    cursor = conn.cursor()
    # SQL expression
    # Selects non-retired address elements (ELEMENTTYPE < 50) of object
    # 9387 plus their type abbreviations and parent ATE information.
    expression = """with tabl1 as (SELECT * from ATEOBJECT r3 where r3.CATEGORY = 103 AND r3.UIDOPERIN=( SELECT MAX (r4.UIDOPERIN) FROM ATEREESTR.ATEOBJECT r4 WHERE r3.OBJECTNUMBER=r4.OBJECTNUMBER GROUP BY r3.OBJECTNUMBER)) SELECT j.IAEUID AS "ID_EVA", j.OBJECTNUMBER as "ID_ATE", j.ELEMENTNAME, x.SHORTNAME_RUS, R.NAMEOBJECT, p.SHORTNAME, t.OBJECTNUMBER as "SELSOV", r.CATEGORY, j.ELEMENTTYPE FROM IAE.ADRELEMENTS j, ATEREESTR.X_ATECATEGORY p, ATEREESTR.X_ATEDISTRICTS i, ATEREESTR.X_ATEREGION g, NKA_SPR.X_EVA_TYPES_ADDR x, ATEREESTR.ATEOBJECT r LEFT JOIN tabl1 t ON r.SOATODEPEND = t.SOATO LEFT JOIN ATEREESTR.X_ATECATEGORY p2 ON t.CATEGORY = p2.CATEGORY where r.UIDOPEROUT is null and j.OBJECTNUMBER = R.OBJECTNUMBER and R.UIDDISTR = I.UIDDISTR and R.UIDREGION = G.UIDREGION and R.CATEGORY = p.CATEGORY and j.JRNREG_OUT is null and j.ELEMENTTYPE < 50 and R.OBJECTNUMBER = 9387 and x.CODE_1 = j.ELEMENTTYPE order by R.NAMEOBJECT, j.ELEMENTNAME"""
    cursor.execute(expression)
    self.table_EVA_Vitebsk = cursor.fetchall()
    # Register subtype 9387 for Vitebsk on the streets feature class.
    arcpy.AddSubtype_management(self.nameStreets, 9387, "Витебск")
    # Staging table that will be turned into the domain.
    arcpy.CreateTable_management(self.nameDataBase, "Витебск_EVA", "", "")
    name_etalon_eva = os.path.join(self.nameDataBase, "Витебск_EVA")
    arcpy.AddField_management(name_etalon_eva, "ID_ATE", "LONG", "", "", "",
                              "", "NULLABLE", "REQUIRED", "")
    arcpy.AddField_management(name_etalon_eva, "ID_EVA", "LONG", "", "", "",
                              "", "NULLABLE", "REQUIRED", "")
    arcpy.AddField_management(name_etalon_eva, "Name_EVA", "TEXT", "", "",
                              "", "", "NULLABLE", "REQUIRED", "")
    cursor_arc = arcpy.da.InsertCursor(name_etalon_eva,
                                       ["ID_ATE", "ID_EVA", "Name_EVA"])
    # Copy the fetched rows; the display name is "<element>_<type abbrev>".
    for el in self.table_EVA_Vitebsk:
        cursor_arc.insertRow([
            el[1], el[0],
            '{0}_{1}'.format(el[2].encode('utf-8'), el[3].encode('utf-8'))
        ])
    # APPEND the staging table into the EVA domain of the database.
    arcpy.TableToDomain_management(name_etalon_eva, "ID_EVA", "Name_EVA",
                                   self.nameDataBase, "Витебск_EVA", "EVA",
                                   "APPEND")
    # arcpy.AssignDomainToField_management(self.nameStreets, "Name_EVA", "Витебск_EVA", 9387)
    # The staging table is no longer needed once the domain exists.
    arcpy.Delete_management(name_etalon_eva, "Table")
def create_domen_ATE_in_DateBase(self):
    """Turn each staged ATE table into a coded-value domain.

    For every table in ``self.list_domen_tables_ATE``, the ID_ATE/Name_ATE
    columns are APPENDed into a domain named after the table, the staging
    table is deleted, and the domain is assigned to the 'Name' field of
    ``self.nameStreets`` for the matching subtype code.
    """
    for ate_table in self.list_domen_tables_ATE:
        # The domain carries the staging table's base name.
        domain_name = ate_table.split("\\")[-1]
        arcpy.TableToDomain_management(ate_table, "ID_ATE", "Name_ATE",
                                       self.nameDataBase, domain_name, "ATE",
                                       "APPEND")
        # Staging table is no longer needed once the domain exists.
        arcpy.Delete_management(ate_table, "Table")
        # Subtype code is looked up by the prefix of the table name.
        subtype_code = self.new_dict_syptypes[domain_name.split('_')[0]][0]
        arcpy.AssignDomainToField_management(self.nameStreets, 'Name',
                                             domain_name, subtype_code)
def import_tables_as_domains(tables, geodatabase):
    """Load each table as a coded-value domain in the target geodatabase.

    The domain name is the table name with any "_domain" suffix removed;
    codes come from the codedValues column and labels from description.

    :param tables {string} path or array of paths
    :param geodatabase Path or reference to a geodatabase.
    """
    try:
        for source_table in tables:
            described = arcpy.Describe(source_table)
            domain_name = described.name.replace("_domain", "")
            arcpy.TableToDomain_management(source_table, "codedValues",
                                           "description", geodatabase,
                                           domain_name)
    except Exception as e:
        output_msg(str(e.args[0]))
        output_msg(arcpy.GetMessages())
    finally:
        output_msg("Completed")
def UniqueValueToDomain(Workspace, Table, Field_Name, Domain_Name):
    """Build a coded-value domain from the distinct values of a field and
    assign it to that field.

    A frequency table of ``Field_Name`` is written to memory, its values are
    mirrored into Code/Description columns, rows with empty codes are
    dropped, and the result REPLACEs ``Domain_Name`` in ``Workspace``.
    """
    arcpy.env.overwriteOutput = True
    frq_table = 'IN_MEMORY/FRQ'
    frq_view = "frq_View"
    source_field = FindField(Table, str(Field_Name))
    #arcpy.AddMessage(source_field.type)

    # Numeric codes can only be NULL; text codes may also be empty strings.
    numeric_types = [
        u'SmallInteger', u'Double', u'Long', u'OID', u'Single', u'Integer'
    ]
    if source_field.type in numeric_types:
        empty_code_sql = """"Code" IS NULL"""
    else:
        empty_code_sql = """"Code" IS NULL OR "Code" = ''"""

    # Process: Frequency — one row per distinct value of the field.
    arcpy.Frequency_analysis(Table, frq_table, Field_Name)

    # Process: AddField — mirror the value into typed Code/Description cols.
    arcpy.AddField_management(frq_table, "Description", "TEXT")
    arcpy.AddField_management(frq_table, "Code", source_field.type,
                              source_field.precision, source_field.scale,
                              source_field.length)

    # Process: CalculateField
    arcpy.CalculateField_management(frq_table, "Description",
                                    "[" + Field_Name + "]", "VB", "")
    arcpy.CalculateField_management(frq_table, "Code",
                                    "[" + Field_Name + "]", "VB", "")

    # Delete rows with empty codes before loading the domain.
    arcpy.MakeTableView_management(frq_table, frq_view)
    arcpy.SelectLayerByAttribute_management(frq_view, "NEW_SELECTION",
                                            empty_code_sql)
    arcpy.DeleteRows_management(frq_view)

    # Process: TableToDomain
    arcpy.TableToDomain_management(frq_view, "Code", "Description",
                                   Workspace, Domain_Name, "Description",
                                   "REPLACE")

    # Process: AssignDomainToField
    arcpy.AssignDomainToField_management(Table, Field_Name, Domain_Name)
def updtDomain2(domTable, codeField, descField, wrkspc, domName, domDesc,
                updateOp, TbleText):
    """Update the coded-value domain ``domName`` in ``wrkspc`` from
    ``domTable``.

    Runs TableToDomain with ``updateOp`` (REPLACE/APPEND/...) and then sorts
    the domain by description.  ``TbleText`` is only used in messages.  Does
    nothing beyond a message when the table is empty.
    """
    # Count the rows in the table to make sure there is something to update
    cSub = arcpy.GetCount_management(domTable)
    if int(cSub.getOutput(0)) > 0:
        # Process: Create a domain from an existing table
        msg = "Update {} domain".format(TbleText)
        arcpy.AddMessage(msg)
        try:
            arcpy.TableToDomain_management(domTable, codeField, descField,
                                           wrkspc, domName, domDesc,
                                           updateOp)
            arcpy.SortCodedValueDomain_management(wrkspc, domName,
                                                  "DESCRIPTION", "ASCENDING")
        except Exception:
            # FIX: was a bare `except:` (also trapped SystemExit and
            # KeyboardInterrupt); the trailing dead `pass` was removed.
            msg = "Unable to update {} domain".format(TbleText)
            arcpy.AddWarning(msg)
    else:
        msg = "No {} to update".format(TbleText)
        arcpy.AddMessage(msg)
def create_domen_EVA_in_DateBase(self):
    """Turn each staged street (EVA) table into a coded-value domain.

    Every table in ``self.list_domen_tables_EVA`` is APPENDed into a domain
    named after the table and then deleted.  The domain is attached to the
    'Name_EVA' field of ``self.nameStreets`` -- using the per-settlement
    subtype code for districts, or the fixed code 17030 for Minsk.
    """
    for eva_table in self.list_domen_tables_EVA:
        # The domain carries the staging table's base name.
        domain_name = eva_table.split("\\")[-1]
        arcpy.TableToDomain_management(eva_table, "ID_EVA", "Name_EVA",
                                       self.nameDataBase, domain_name, "EVA",
                                       "APPEND")
        # Staging table is no longer needed once the domain exists.
        arcpy.Delete_management(eva_table, "Table")
        if self.name_district != "Минск":
            # Subtype code is looked up by the prefix of the table name.
            subtype_code = self.new_dict_syptypes[
                domain_name.split('_')[0]][0]
        else:
            subtype_code = 17030
        arcpy.AssignDomainToField_management(self.nameStreets, 'Name_EVA',
                                             domain_name, subtype_code)
def main(thisDB, coordSystem, nCrossSections):
    """Build a GeMS-style geodatabase skeleton inside ``thisDB``.

    Creates the GeologicMap feature dataset and its polygon/line/point
    feature classes, the optional CorrelationOfMapUnits dataset, up to 26
    CrossSection datasets, the standard tables, the GeoMaterials and
    confidence domains, and optionally cartographic representations and
    editor tracking.  Relies on several module-level globals
    (OptionalElements, tableDict, addLTYPE, debug, transDict,
    GeoMaterialConfidenceValues, DefaultExIDConfidenceValues, addConfs,
    cartoReps, trackEdits, default) and helper functions
    (createFeatureClass, addMsgAndPrint, fieldNameList,
    cartoRepsExistAndLayer, addTracking).  Python 2 only (uses ``<>``).
    """
    # create feature dataset GeologicMap
    addMsgAndPrint(' Creating feature dataset GeologicMap...')
    try:
        arcpy.CreateFeatureDataset_management(thisDB, 'GeologicMap',
                                              coordSystem)
    except:
        addMsgAndPrint(arcpy.GetMessages(2))

    # create feature classes in GeologicMap
    # poly feature classes
    featureClasses = ['MapUnitPolys']
    for fc in ['DataSourcePolys', 'MapUnitOverlayPolys', 'OverlayPolys']:
        if fc in OptionalElements:
            featureClasses.append(fc)
    for featureClass in featureClasses:
        fieldDefs = tableDict[featureClass]
        # NOTE(review): `fc` here is the leftover value of the loop above,
        # not `featureClass` -- this looks like a latent bug (PTYPE may be
        # appended for DataSourcePolys); confirm intent before changing.
        if addLTYPE and fc <> 'DataSourcePolys':
            fieldDefs.append(['PTYPE', 'String', 'NullsOK', 50])
        createFeatureClass(thisDB, 'GeologicMap', featureClass, 'POLYGON',
                           fieldDefs)

    # line feature classes
    featureClasses = ['ContactsAndFaults']
    for fc in ['GeologicLines', 'CartographicLines', 'IsoValueLines']:
        if fc in OptionalElements:
            featureClasses.append(fc)
    if debug:
        addMsgAndPrint('Feature classes = ' + str(featureClasses))
    for featureClass in featureClasses:
        fieldDefs = tableDict[featureClass]
        if featureClass in ['ContactsAndFaults', 'GeologicLines'] and addLTYPE:
            fieldDefs.append(['LTYPE', 'String', 'NullsOK', 50])
        createFeatureClass(thisDB, 'GeologicMap', featureClass, 'POLYLINE',
                           fieldDefs)

    # point feature classes
    featureClasses = []
    for fc in [
            'OrientationPoints', 'GeochronPoints', 'FossilPoints',
            'Stations', 'GenericSamples', 'GenericPoints'
    ]:
        if fc in OptionalElements:
            featureClasses.append(fc)
    for featureClass in featureClasses:
        fieldDefs = tableDict[featureClass]
        if addLTYPE:
            fieldDefs.append(['PTTYPE', 'String', 'NullsOK', 50])
        createFeatureClass(thisDB, 'GeologicMap', featureClass, 'POINT',
                           fieldDefs)

    # create feature dataset CorrelationOfMapUnits
    if 'CorrelationOfMapUnits' in OptionalElements:
        addMsgAndPrint(' Creating feature dataset CorrelationOfMapUnits...')
        arcpy.CreateFeatureDataset_management(thisDB, 'CorrelationOfMapUnits')
        fieldDefs = tableDict['CMUMapUnitPolys']
        createFeatureClass(thisDB, 'CorrelationOfMapUnits',
                           'CMUMapUnitPolys', 'POLYGON', fieldDefs)
        fieldDefs = tableDict['CMULines']
        createFeatureClass(thisDB, 'CorrelationOfMapUnits', 'CMULines',
                           'POLYLINE', fieldDefs)
        fieldDefs = tableDict['CMUPoints']
        createFeatureClass(thisDB, 'CorrelationOfMapUnits', 'CMUPoints',
                           'POINT', fieldDefs)

    # create CrossSections
    # clamp the requested count to the range 0..26 (one per letter)
    if nCrossSections > 26:
        nCrossSections = 26
    if nCrossSections < 0:
        nCrossSections = 0
    # note space in position 0
    alphabet = ' ABCDEFGHIJKLMNOPQRSTUVWXYZ'
    for n in range(1, nCrossSections + 1):
        xsLetter = alphabet[n]
        xsName = 'CrossSection' + xsLetter
        xsN = 'CS' + xsLetter
        #create feature dataset CrossSectionA
        addMsgAndPrint(' Creating feature data set CrossSection' + xsLetter +
                       '...')
        arcpy.CreateFeatureDataset_management(thisDB, xsName)
        fieldDefs = tableDict['MapUnitPolys']
        if addLTYPE:
            fieldDefs.append(['PTYPE', 'String', 'NullsOK', 100])
        # rename the template's ID field to carry the cross-section prefix
        fieldDefs[0][0] = xsN + 'MapUnitPolys_ID'
        createFeatureClass(thisDB, xsName, xsN + 'MapUnitPolys', 'POLYGON',
                           fieldDefs)
        fieldDefs = tableDict['ContactsAndFaults']
        if addLTYPE:
            fieldDefs.append(['LTYPE', 'String', 'NullsOK', 100])
        fieldDefs[0][0] = xsN + 'ContactsAndFaults_ID'
        createFeatureClass(thisDB, xsName, xsN + 'ContactsAndFaults',
                           'POLYLINE', fieldDefs)
        fieldDefs = tableDict['OrientationPoints']
        if addLTYPE:
            fieldDefs.append(['PTTYPE', 'String', 'NullsOK', 100])
        fieldDefs[0][0] = xsN + 'OrientationPoints_ID'
        createFeatureClass(thisDB, xsName, xsN + 'OrientationPoints',
                           'POINT', fieldDefs)

    # create tables
    tables = ['DescriptionOfMapUnits', 'DataSources', 'Glossary']
    for tb in [
            'RepurposedSymbols', 'StandardLithology', 'GeologicEvents',
            'MiscellaneousMapInformation'
    ]:
        if tb in OptionalElements:
            tables.append(tb)
    for table in tables:
        addMsgAndPrint(' Creating table ' + table + '...')
        try:
            arcpy.CreateTable_management(thisDB, table)
            fieldDefs = tableDict[table]
            for fDef in fieldDefs:
                try:
                    # String fields carry an explicit length (fDef[3]);
                    # other field types use the tool defaults ('#').
                    if fDef[1] == 'String':
                        arcpy.AddField_management(thisDB + '/' + table,
                                                  fDef[0],
                                                  transDict[fDef[1]], '#',
                                                  '#', fDef[3], '#',
                                                  transDict[fDef[2]])
                    else:
                        arcpy.AddField_management(thisDB + '/' + table,
                                                  fDef[0],
                                                  transDict[fDef[1]], '#',
                                                  '#', '#', '#',
                                                  transDict[fDef[2]])
                except:
                    addMsgAndPrint('Failed to add field ' + fDef[0] +
                                   ' to table ' + table)
                    addMsgAndPrint(arcpy.GetMessages(2))
        except:
            addMsgAndPrint(arcpy.GetMessages())

    ### GeoMaterials
    addMsgAndPrint(' Setting up GeoMaterials table and domains...')
    # Copy GeoMaterials table
    arcpy.Copy_management(
        os.path.dirname(sys.argv[0]) +
        '/../Resources/GeMS_lib.gdb/GeoMaterialDict',
        thisDB + '/GeoMaterialDict')
    # make GeoMaterials domain
    arcpy.TableToDomain_management(thisDB + '/GeoMaterialDict',
                                   'GeoMaterial', 'IndentedName', thisDB,
                                   'GeoMaterials')
    # attach it to DMU field GeoMaterial
    arcpy.AssignDomainToField_management(thisDB + '/DescriptionOfMapUnits',
                                         'GeoMaterial', 'GeoMaterials')
    # Make GeoMaterialConfs domain, attach it to DMU field GeoMaterialConf
    arcpy.CreateDomain_management(thisDB, 'GeoMaterialConfidenceValues', '',
                                  'TEXT', 'CODED')
    for val in GeoMaterialConfidenceValues:
        arcpy.AddCodedValueToDomain_management(
            thisDB, 'GeoMaterialConfidenceValues', val, val)
    arcpy.AssignDomainToField_management(thisDB + '/DescriptionOfMapUnits',
                                         'GeoMaterialConfidence',
                                         'GeoMaterialConfidenceValues')

    #Confidence domains, Glossary entries, and DataSources entry
    if addConfs:
        addMsgAndPrint(
            ' Adding standard ExistenceConfidence and IdentityConfidence domains'
        )
        # create domain, add domain values, and link domain to appropriate fields
        addMsgAndPrint(
            ' Creating domain, linking domain to appropriate fields')
        arcpy.CreateDomain_management(thisDB, 'ExIDConfidenceValues', '',
                                      'TEXT', 'CODED')
        for item in DefaultExIDConfidenceValues:
            # items are [term, definition, source]
            code = item[0]
            arcpy.AddCodedValueToDomain_management(thisDB,
                                                   'ExIDConfidenceValues',
                                                   code, code)
        # Walk every feature class in every dataset and attach the domain
        # to any of the three recognized confidence fields.
        arcpy.env.workspace = thisDB
        dataSets = arcpy.ListDatasets()
        for ds in dataSets:
            arcpy.env.workspace = thisDB + '/' + ds
            fcs = arcpy.ListFeatureClasses()
            for fc in fcs:
                fieldNames = fieldNameList(fc)
                for fn in fieldNames:
                    if fn in ('ExistenceConfidence', 'IdentityConfidence',
                              'ScientificConfidence'):
                        #addMsgAndPrint(' '+ds+'/'+fc+':'+fn)
                        arcpy.AssignDomainToField_management(
                            thisDB + '/' + ds + '/' + fc, fn,
                            'ExIDConfidenceValues')
        # add definitions of domain values to Glossary
        addMsgAndPrint(' Adding domain values to Glossary')
        ## create insert cursor on Glossary
        cursor = arcpy.da.InsertCursor(
            thisDB + '/Glossary',
            ['Term', 'Definition', 'DefinitionSourceID'])
        for item in DefaultExIDConfidenceValues:
            cursor.insertRow((item[0], item[1], item[2]))
        del cursor
        # add definitionsource to DataSources
        addMsgAndPrint(' Adding definition source to DataSources')
        ## create insert cursor on DataSources
        cursor = arcpy.da.InsertCursor(thisDB + '/DataSources',
                                       ['DataSources_ID', 'Source', 'URL'])
        cursor.insertRow((
            'FGDC-STD-013-2006',
            'Federal Geographic Data Committee [prepared for the Federal Geographic Data Committee by the U.S. Geological Survey], 2006, FGDC Digital Cartographic Standard for Geologic Map Symbolization: Reston, Va., Federal Geographic Data Committee Document Number FGDC-STD-013-2006, 290 p., 2 plates.',
            'https://ngmdb.usgs.gov/fgdc_gds/geolsymstd.php'))
        del cursor

    # if cartoReps, add cartographic representations to all feature classes
    # trackEdits, add editor tracking to all feature classes and tables
    if cartoReps or trackEdits:
        arcpy.env.workspace = thisDB
        tables = arcpy.ListTables()
        datasets = arcpy.ListDatasets()
        for dataset in datasets:
            addMsgAndPrint(' Dataset ' + dataset)
            arcpy.env.workspace = thisDB + '/' + dataset
            fcs = arcpy.ListFeatureClasses()
            for fc in fcs:
                hasReps, repLyr = cartoRepsExistAndLayer(fc)
                if cartoReps and hasReps:
                    addMsgAndPrint(
                        ' Adding cartographic representations to ' + fc)
                    try:
                        arcpy.AddRepresentation_cartography(
                            fc, fc + '_rep1', 'RuleID1', 'Override1',
                            default, repLyr, 'NO_ASSIGN')
                        """
                        Note the 1 suffix on the representation name
                        (fc+'_rep1') and the RuleID1 and Override1 fields.
                        If at some later time we wish to add additional
                        representations to a feature class, each will
                        require it's own RuleID and Override fields which
                        may be identified, and tied to the appropriate
                        representation, by suffixes 2, 3, ...
                        Naming representations fc+'_rep'+str(n) should be
                        sufficient to identify each representation in a
                        geodatabase uniquely, and allow for multiple
                        representations within a single feature class.
                        It appears that ArcGIS provides no means of
                        scripting an inventory of representations within
                        feature class or geodatabase. So, the convenience
                        of establishing a coded-value domain that ties
                        representation rule IDs (consecutive integers) to
                        some sort of useful text identifier becomes a
                        necessity for flagging the presence of a
                        representation: One CAN script the inventory of
                        domains in a geodatabase. Run arcpy.da.ListDomains.
                        Check the result for names of the form
                        <featureClassName>_rep??_Rule and voila, you've got
                        a list of representations (and their associated
                        feature classes) in the geodatabase. Moral: If you
                        add a representation, be sure to add an associated
                        coded-value domain and name it appropriately!
                        """
                    except:
                        addMsgAndPrint(arcpy.GetMessages(2))
                if trackEdits:
                    addTracking(fc)
        if trackEdits:
            addMsgAndPrint(' Tables ')
            arcpy.env.workspace = thisDB
            for aTable in tables:
                if aTable <> 'GeoMaterialDict':
                    addTracking(aTable)
# NOTE(review): the next line is the tail of a CalculateField-style call
# whose beginning lies before this chunk -- do not edit in isolation.
"\"- Select Condition -\"", "PYTHON")
# ---------------------------------------------------------------------------------------------- Set up Domains
# Collect the names of the domains already present in the workspace so that
# each lookup-table domain is only created once.
desc = arcpy.Describe(watershedGDB_path)
listOfDomains = []
domains = desc.Domains
for domain in domains:
    listOfDomains.append(domain)
del desc, domains
# Create each missing domain from its lookup table (code == description).
if not "LandUse_Domain" in listOfDomains:
    arcpy.TableToDomain_management(TR_55_LU_Lookup, "LandUseDesc",
                                   "LandUseDesc", watershedGDB_path,
                                   "LandUse_Domain", "LandUse_Domain",
                                   "REPLACE")
if not "Hydro_Domain" in listOfDomains:
    arcpy.TableToDomain_management(Hydro_Groups_Lookup, "HydrolGRP",
                                   "HydrolGRP", watershedGDB_path,
                                   "Hydro_Domain", "Hydro_Domain", "REPLACE")
if not "Condition_Domain" in listOfDomains:
    arcpy.TableToDomain_management(Condition_Lookup, "CONDITION",
                                   "CONDITION", watershedGDB_path,
                                   "Condition_Domain", "Condition_Domain",
                                   "REPLACE")
del listOfDomains
# Column names shared by every domain CSV.
code_field = 'code'
desc_field = 'description'
# Create dictionary of property OIDs and names
# (keys of `properties` are "OID - Name" strings)
props_dict = {
    k.split(' - ')[0]: k.split(' - ')[1]
    for k in properties.keys()
}
# Create universal domains in gdb
universal_domains = [
    domain_act_status, domain_act_plant_pattern, domain_act_plant_stock,
    domain_harv_status, domain_s_point, domain_s_line, domain_s_poly
]
try:
    for domain in universal_domains:
        # Each domain name doubles as its CSV file name and description.
        domain_path = os.path.join(domains_dir, domain + '.csv')
        arcpy.TableToDomain_management(domain_path, code_field, desc_field,
                                       sde_connection, domain, domain,
                                       'REPLACE')
        print('...Domain created for: {0}'.format(domain))
        GWRutils.logMessage(log,
                            '...Domain created for: {0}'.format(domain))
except Exception as e:
    # Domain creation is a hard prerequisite; abort the script on failure.
    print('Failed to create domains from universal domain csvs! Exiting...')
    print(e)
    GWRutils.logMessage(
        log, 'Failed to create domains from universal domain' +
        ' csvs! Exiting...')
    GWRutils.logMessage(log, str(e))
    sys.exit()

# Assign universal domains to corresponding datasets
# NOTE(review): this chunk is truncated here -- the body of the following
# try block lies beyond this excerpt.
try:
# Locate the tab-delimited domain file for this gdb prefix; the glob keeps
# only the last match (NOTE(review): multiple matches silently collapse to
# one -- confirm that is intended).
tabname = ''
arcpy.AddMessage(gdb)
for fpath in glob.glob(
        os.path.dirname(os.path.realpath(__file__)) +
        r'/2015 Domain Tab Delimited Text Files/' + gdb + r'*.tab'):
    tabname = fpath
if tabname:
    # The first line of the tab file lists the field names; each non-DESC
    # field has a companion <field>DESC column holding its descriptions.
    with open(tabname, 'r') as f:
        first_line = f.readline()
        fieldNameList = re.split(r'\s+', first_line)
    # Loop through fields
    for fieldName in fieldNameList:
        if fieldName[-4:] != 'DESC' and fieldName != '':
            arcpy.AddMessage('\nAssigning domain(s) to table ' + fieldName +
                             '...')
            # APPEND the code/description pair into a gdb-prefixed domain.
            arcpy.TableToDomain_management(tabname, fieldName,
                                           fieldName + 'DESC', workspace,
                                           gdb + '_' + fieldName, '',
                                           'APPEND')
            if gdb in CRASH_LEVEL or gdb == 'zshp':
                # SCHDST1011 is deliberately excluded from crash-level data.
                if fieldName != 'SCHDST1011':
                    try:
                        arcpy.AssignDomainToField_management(
                            workspace + crashLevelFC, fieldName,
                            gdb + '_' + fieldName)
                    except:
                        arcpy.AddMessage('Domain for ' + fieldName +
                                         ' not included')
                arcpy.AddMessage('done!')
            if gdb in VEHICLE_LEVEL:
                arcpy.AssignDomainToField_management(
                    workspace + vehicleLevelTab, fieldName,
                    gdb + '_' + fieldName)
                arcpy.AddMessage('done!')
            if gdb in PERSON_LEVEL:
                if personAppend:
                    arcpy.AssignDomainToField_management(
                        workspace + personLevelTab, fieldName,
                        gdb + '_' + fieldName)
                arcpy.AddMessage('done!')
def main():
    """Script-tool entry point: build the Credit Project analysis layers.

    Reads five tool parameters (analysis area, lek distance raster/path,
    optional provided Current_Anthro_Features, project folder, optional
    project name), prepares the project geodatabase environment, builds the
    Current/Projected anthropogenic disturbance rasters, dissolves map units,
    summarizes proportions and zonal statistics per map unit, and exports the
    result tables to Excel.

    NOTE(review): this function depends heavily on the project-local
    ``ccslib`` helpers; the behavior notes below describe only what is
    visible at the call sites.
    """
    # GET PARAMETER VALUES
    Analysis_Area = arcpy.GetParameterAsText(0)
    Dist_Lek = arcpy.GetParameterAsText(1)
    Current_Anthro_Features_Provided = arcpy.GetParameterAsText(2)  # optional
    Project_Folder = arcpy.GetParameterAsText(3)
    Project_Name = arcpy.GetParameterAsText(4)  # optional

    # DEFINE DIRECTORIES & PATH NAMES FOR FOLDERS & GBDs
    # Get the pathname to this script
    scriptPath = sys.path[0]
    arcpy.AddMessage("Script folder: " + scriptPath)

    # Construct pathname to workspace (the gdb containing Analysis_Area)
    workspace = arcpy.Describe(Analysis_Area).path
    arcpy.AddMessage("Project geodatabase: " + workspace)

    # Instantiate a ccsStandard object (project-standard paths and constants)
    ccsStandard = ccslib.ccsStandard(workspace, scriptPath)

    # ENVIRONMENT SETTINGS
    # Set workspaces; create the scratch folder beside the gdb if missing
    arcpy.env.workspace = workspace
    scratch_folder = os.path.join(arcpy.Describe(workspace).path, 'scratch')
    if arcpy.Exists(scratch_folder):
        pass
    else:
        arcpy.CreateFolder_management(
            arcpy.Describe(workspace).path, 'scratch')
    arcpy.env.scratchWorkspace = scratch_folder

    # Overwrite outputs
    arcpy.env.overwriteOutput = True

    # DEFINE GLOBAL VARIABLES
    AnthroAttributeTable = ccsStandard.AnthroAttributeTable
    emptyRaster = ccsStandard.EmptyRaster
    inputDataPath = ccsStandard.InputDataPath

    # Filenames for feature classes or rasters used by this script
    MAP_UNITS = "Map_Units"
    ANALYSIS_AREA = "Analysis_Area"  # provided
    CURRENT_ANTHRO_FEATURES = "Current_Anthro_Features"
    CREDIT_PROJECT_AREA = "Credit_Project_Area"
    PROPOSED_MODIFIED_FEATURES = "Proposed_Modified_Features"

    # Filenames for feature classes or rasters created by this script
    CURRENT_ANTHRO_DISTURBANCE = "Current_Anthro_Disturbance"
    PROJECTED_ANTHRO_DISTURBANCE = "Projected_Anthro_Disturbance"
    MAP_UNITS_DISSOLVE = "Map_Units_Dissolve"
    CURRENT_MGMT_CAT = "Current_Mgmt_Cat"
    CURRENT_WMZ = "Current_WMZ"
    CURRENT_PMU = "Current_PMU"
    CURRENT_PRECIP = "Current_Precip"

    # ------------------------------------------------------------------------
    # FUNCTION CALLS
    # Check out Spatial Analyst extension
    ccslib.CheckOutSpatialAnalyst()

    # Check Analysis_Area
    feature = Analysis_Area
    expected_fcs = [MAP_UNITS, ANALYSIS_AREA, CREDIT_PROJECT_AREA]
    ccslib.CheckPolygonInput(feature, expected_fcs=expected_fcs)

    # Set up flag for projects that propose to modify anthro features
    includes_anthro_mod = False

    # Check for existence of 'Proposed_Modified_Features'
    if arcpy.Exists(PROPOSED_MODIFIED_FEATURES):
        # Update flag
        includes_anthro_mod = True

    # Copy Dist_Lek to geodatabase

    # Create Current_Anthro_Features layer, or copy provided into geodatabase
    if Current_Anthro_Features_Provided:
        # Clear selection, if present
        ccslib.ClearSelectedFeatures(Current_Anthro_Features_Provided)

        # Check Current_Anthro_Features
        feature = Current_Anthro_Features_Provided
        required_fields = ["Type", "Subtype"]
        no_null_fields = None
        expected_fcs = None
        ccslib.CheckPolygonInput(feature, required_fields, expected_fcs,
                                 no_null_fields)

        # Update message
        arcpy.AddMessage("Copying Current_Anthro_Features to project "
                         "geodatabase")

        # Copy Current_Anthro_Features to geodatabase
        provided_input = Current_Anthro_Features_Provided
        parameter_name = CURRENT_ANTHRO_FEATURES
        preserve_existing = True
        Current_Anthro_Features = ccslib.AdoptParameter(
            provided_input, parameter_name, preserve_existing)
    else:
        # Update message
        arcpy.AddMessage("Merging all clipped anthropogenic features to "
                         "create the Current_Anthro_Features layer")

        # Merge features (selecting only polygon features)
        fileList = arcpy.ListFeatureClasses("Anthro*Clip",
                                            feature_type="Polygon")
        out_name = CURRENT_ANTHRO_FEATURES
        Current_Anthro_Features = ccslib.MergeFeatures(fileList, out_name)

    # Simplify fields
    allowable_fields = ["Type", "Subtype", "SubtypeID",
                        "Subtype_As_Modified"]
    ccslib.SimplifyFields(Current_Anthro_Features, allowable_fields)

    # Remove subtypes from Current_Anthro_Features
    feature = Current_Anthro_Features
    try:
        subtypes = arcpy.da.ListSubtypes(feature)
        for subtype in subtypes:
            arcpy.RemoveSubtype_management(feature, subtype)
            arcpy.AddMessage("Subtype removed")
    except arcpy.ExecuteError:
        arcpy.AddMessage("Could not remove subtypes from "
                         "Current_Anthro_Features")

    # Add Domains for Type and Subtype
    arcpy.RemoveDomainFromField_management(feature, "Type")
    try:
        # Build the "Type" coded-value domain from the attribute table's
        # Type column (code == description)
        domainName = "Type"
        arcpy.CreateDomain_management(workspace, domainName,
                                      "Valid " + domainName + "s", "TEXT",
                                      "CODED")
        typeList = [
            row[0]
            for row in arcpy.da.SearchCursor(AnthroAttributeTable, "Type")
        ]
        for code in typeList:
            arcpy.AddCodedValueToDomain_management(workspace, domainName,
                                                   code, code)
    except arcpy.ExecuteError:
        arcpy.AddMessage("Could not add domains for "
                         "Current_Anthro_Features")
    arcpy.AssignDomainToField_management(feature, "Type", "Type")

    arcpy.RemoveDomainFromField_management(feature, "Subtype")
    arcpy.TableToDomain_management(AnthroAttributeTable, "Subtype", "Subtype",
                                   workspace, "Subtype",
                                   "Valid anthropogenic subtypes", "REPLACE")
    arcpy.AssignDomainToField_management(feature, "Subtype", "Subtype")

    # Update Message
    arcpy.AddMessage("Calculating Current Anthropogenic Disturbance")

    # Calculate Current_Anthro_Disturbance
    extent_fc = Analysis_Area
    anthro_features = Current_Anthro_Features
    term = ccsStandard.CreditTerms[0]
    Current_Anthro_Disturbance = ccslib.CalcAnthroDist(extent_fc,
                                                       anthro_features,
                                                       emptyRaster,
                                                       AnthroAttributeTable,
                                                       term)
    Current_Anthro_Disturbance.save(CURRENT_ANTHRO_DISTURBANCE)

    # Update message
    arcpy.AddMessage("Current_Anthro_Disturbance Calculated")

    # If the project proposes to modify existing anthropogenic features,
    # calculate post-project anthropogenic disturbance (uplift)
    if includes_anthro_mod:
        # Calculate uplift
        extent_fc = Analysis_Area
        anthro_features = Current_Anthro_Features
        term = ccsStandard.CreditTerms[1]
        field = "Subtype_As_Modified"
        Projected_Anthro_Disturbance = ccslib.CalcAnthroDist(
            extent_fc, anthro_features, emptyRaster, AnthroAttributeTable,
            term, field)
        Projected_Anthro_Disturbance.save(PROJECTED_ANTHRO_DISTURBANCE)

        # Update message
        arcpy.AddMessage("Projected_Anthro_Disturbance Calculated")

    arcpy.AddMessage("Creating pre-defined map units of PJ")

    Map_Units = MAP_UNITS
    if len(arcpy.ListFields(Map_Units, "Conifer_Phase")) == 0:
        # Create pre-defined map units for PJ
        # Intersect the Map_Units layer with the PJ layer
        in_feature = ccsStandard.PJ_Phases
        field_name = "Conifer_Phase"
        ccslib.CreatePreDefinedMapUnits(Map_Units, in_feature, field_name)

        # Remove unwanted fields from Map Units feature class
        allowable_fields = [
            "Conifer_Phase", "Map_Unit_ID", "Map_Unit_Name", "Meadow",
            "Notes", "Indirect"
        ]
        ccslib.SimplifyFields(Map_Units, allowable_fields)

        # Update message
        arcpy.AddMessage("Merging indirect benefits area and map units layer")

        # Combine the Map Units layer and Indirect Impact Layer
        indirect_benefit_area = CREDIT_PROJECT_AREA
        mgmt_map_units = Map_Units
        Map_Units = ccslib.AddIndirectBenefitArea(indirect_benefit_area,
                                                  mgmt_map_units)

        # Add Map Units layer to map document
        layerFile = ccsStandard.getLayerFile("Map_Units.lyr")
        ccslib.AddToMap(Map_Units, layerFile)
    else:
        # Add Indirect field to Map Units layer and populate with False
        # Add field "Indirect"
        feature = MAP_UNITS
        fieldsToAdd = ["Indirect"]
        fieldTypes = ["TEXT"]
        ccslib.AddFields(feature, fieldsToAdd, fieldTypes)

        # Update field to equal "False"
        with arcpy.da.UpdateCursor(feature, fieldsToAdd) as cursor:
            for row in cursor:
                row[0] = "False"
                cursor.updateRow(row)

    # Calculate local scale modifiers for Current condition
    extent_fc = Analysis_Area
    anthro_disturbance = CURRENT_ANTHRO_DISTURBANCE
    term = ccsStandard.CreditTerms[0]
    ccslib.CalcModifiers(extent_fc, inputDataPath, Dist_Lek,
                         anthro_disturbance, term)

    # Calculate local scale modifiers for Projected condition
    # Determine which anthropogenic disturbance raster to use
    extent_fc = Analysis_Area
    if arcpy.Exists(PROJECTED_ANTHRO_DISTURBANCE):
        anthro_disturbance = PROJECTED_ANTHRO_DISTURBANCE
    else:
        anthro_disturbance = CURRENT_ANTHRO_DISTURBANCE
    term = ccsStandard.CreditTerms[1]
    ccslib.CalcModifiers(extent_fc, inputDataPath, Dist_Lek,
                         anthro_disturbance, term, PJ_removal=True)

    # Update message
    arcpy.AddMessage("Dissolving all multi-part map units to create "
                     "Map_Units_Dissolve")

    # Dissolve Map Units
    allowable_fields = [
        "Map_Unit_ID", "Map_Unit_Name", "Meadow", "Conifer_Phase", "BROTEC",
        "Indirect"
    ]
    out_name = MAP_UNITS_DISSOLVE
    anthro_features = Current_Anthro_Features
    Map_Units_Dissolve = ccslib.DissolveMapUnits(MAP_UNITS, allowable_fields,
                                                 out_name, anthro_features)

    # Update message
    arcpy.AddMessage("Adding Map_Units_Dissolve to map")

    # Add layer to map document
    feature = Map_Units_Dissolve
    layerFile = ccsStandard.getLayerFile("Map_Units.lyr")
    ccslib.AddToMap(feature, layerFile, zoom_to=True)

    # Update message
    arcpy.AddMessage("Calculating area in acres for each map unit")

    # Calculate Area
    ccslib.CalcAcres(Map_Units_Dissolve)

    # Initialize a list to track proportion feature classes
    prop_fcs = []

    # Update message
    arcpy.AddMessage("Calculating Proportion within each precipitation zone")

    # Calculate Proportion of each map unit in each Precip Zone
    in_feature = os.path.join(inputDataPath, "Precip")
    out_feature_class = CURRENT_PRECIP
    field_name = "Precip_Proportion"
    ccslib.CalcProportion(Map_Units_Dissolve, in_feature, out_feature_class,
                          field_name)
    prop_fcs.append(out_feature_class)

    # Update message
    arcpy.AddMessage("Calculating Management Importance Factor")

    # Calculate Proportion of each map unit in each Management Category
    in_feature = os.path.join(inputDataPath, "Mgmt_Cat")
    out_feature_class = CURRENT_MGMT_CAT
    field_name = "Mgmt_Proportion"
    ccslib.CalcProportion(Map_Units_Dissolve, in_feature, out_feature_class,
                          field_name)
    prop_fcs.append(out_feature_class)

    # Update message
    arcpy.AddMessage("Evaluating WAFWA Management Zone")

    # Calculate Proportion in each map unit in each WAFWA Zone
    in_feature = os.path.join(inputDataPath, "NV_WAFWA")
    out_feature_class = CURRENT_WMZ
    field_name = "WMZ_Proportion"
    ccslib.CalcProportion(Map_Units_Dissolve, in_feature, out_feature_class,
                          field_name)
    prop_fcs.append(out_feature_class)

    # Update message
    arcpy.AddMessage("Evaluating Priority Management Unit")

    # Calculate Proportion in each map unit in each PMU
    in_feature = os.path.join(inputDataPath, "NV_PMU")
    out_feature_class = CURRENT_PMU
    field_name = "PMU_Proportion"
    ccslib.CalcProportion(Map_Units_Dissolve, in_feature, out_feature_class,
                          field_name)
    prop_fcs.append(out_feature_class)

    # Delete unnecessary fields in proportion feature classes
    allowable_fields = [
        "Map_Unit_ID", "Management", "Mgmt_zone", "PMU_NAME", "Precip",
        "Mgmt_Proportion", "WMZ_Proportion", "PMU_Proportion",
        "Precip_Proportion"
    ]
    for feature in prop_fcs:
        ccslib.SimplifyFields(feature, allowable_fields)

    # Set processing extent to Map_Units layer
    arcpy.env.extent = arcpy.Describe(Map_Units_Dissolve).extent

    # Calculate the average HSI values per map unit for each map unit
    HSIseasons = ccsStandard.HSISeasons
    for season in HSIseasons:
        # Update message
        arcpy.AddMessage("Summarizing " + season + " HSI")

        # Calculate zonal statistics for each map unit
        inZoneData = Map_Units_Dissolve
        inValueRaster = os.path.join(inputDataPath, season + "_HSI")
        zoneField = "Map_Unit_ID"
        outTable = "ZonalStats_" + season + "_HSI"
        ccslib.CalcZonalStats(inZoneData, zoneField, inValueRaster, outTable)

        # Join the zonal statistic to the Map Units Dissolve table
        fieldName = season + "_HSI"
        ccslib.JoinMeanToTable(inZoneData, outTable, zoneField, fieldName)

    # Update message
    arcpy.AddMessage("Calculating PJ cover per map unit")

    # Calculate the average pinon-juniper cover per map unit
    inZoneData = Map_Units_Dissolve
    inValueRaster = os.path.join(inputDataPath, "PJ_Cover")
    zoneField = "Map_Unit_ID"
    outTable = "ZonalStats_PJCover"
    ccslib.CalcZonalStats(inZoneData, zoneField, inValueRaster, outTable)

    # Join the zonal statistic to the Map Units Dissolve table
    fieldName = "PJ_Cover"
    ccslib.JoinMeanToTable(inZoneData, outTable, zoneField, fieldName)

    # Calculate the average seasonal modifier values per map unit and
    # join to Map_Unit_Dissolve table
    terms = ccsStandard.CreditTerms
    seasons = ccsStandard.Seasons
    for term in terms:
        for season in seasons:
            # Update message
            arcpy.AddMessage("Summarizing " + term + "_Local_" + season)

            # Calculate zonal statistics for each map unit
            inZoneData = Map_Units_Dissolve
            inValueRaster = term + "_Local_" + season
            zoneField = "Map_Unit_ID"
            outTable = "ZonalStats_" + term + season
            ccslib.CalcZonalStats(inZoneData, zoneField, inValueRaster,
                                  outTable)

            # Join the zonal statistic to the Map Units Dissolve table
            fieldName = term + "_" + season
            ccslib.JoinMeanToTable(inZoneData, outTable, zoneField, fieldName)

    # Calculate impact intensity for credit project
    # NOTE(review): the bare except silently swallows any failure here,
    # including programming errors — deliberate best-effort, presumably.
    try:
        ccslib.calcCreditBenefit(inputDataPath, includes_anthro_mod)

        # Add credit project quality to map
        layerFile = ccsStandard.getLayerFile("Credit_Project_Benefit.lyr")
        ccslib.AddToMap("Credit_Quality", layerFile, zoom_to=True)
    except:
        pass

    # Remove uplift modifier for map units that do not qualify
    # Select map units of Indirect if project involves anthro
    # feature modification
    if includes_anthro_mod:
        feature = MAP_UNITS_DISSOLVE
        arcpy.MakeFeatureLayer_management(feature, "lyr")
        # Indirect map units whose Projected seasonal score differs from
        # the Current score in any season
        where_clause = """({} = '{}') AND ({} <> {} OR {} <> {} OR {} <> {})""".format(
            arcpy.AddFieldDelimiters(feature, "Indirect"), "True",
            arcpy.AddFieldDelimiters(feature, "Projected_Breed"),
            arcpy.AddFieldDelimiters(feature, "Current_Breed"),
            arcpy.AddFieldDelimiters(feature, "Projected_LBR"),
            arcpy.AddFieldDelimiters(feature, "Current_LBR"),
            arcpy.AddFieldDelimiters(feature, "Projected_Winter"),
            arcpy.AddFieldDelimiters(feature, "Current_Winter"))
        # NOTE(review): the selection is applied to `feature` (the fc name),
        # not the "lyr" layer created above — confirm this is intended.
        arcpy.SelectLayerByAttribute_management(feature, "NEW_SELECTION",
                                                where_clause)
        test = arcpy.GetCount_management(feature)
        count = int(test.getOutput(0))
        if count > 0:
            # Update message
            arcpy.AddMessage("Confirming removal of PJ cover credits meet "
                             "eligibility criteria (if applicable)")

            # Substitute Projected_Anthro_Disturbance if it exists
            extent_fc = Analysis_Area
            if arcpy.Exists(PROJECTED_ANTHRO_DISTURBANCE):
                anthroDisturbance = PROJECTED_ANTHRO_DISTURBANCE
            else:
                anthroDisturbance = CURRENT_ANTHRO_DISTURBANCE

            # Repeat calculation of modifiers w/o PJ_uplift
            term = ccsStandard.CreditTerms[1]
            ccslib.CalcModifiers(extent_fc, inputDataPath, Dist_Lek,
                                 anthroDisturbance, term, PJ_removal=False,
                                 suffix="noPJ")

            # Repeat joins to table
            for season in seasons:
                # Calculate zonal statistics for each map unit
                inZoneData = Map_Units_Dissolve
                inValueRaster = term + "_Local_" + season + "_noPJ"
                zoneField = "Map_Unit_ID"
                outTable = "ZonalStats_" + term + season
                ccslib.CalcZonalStats(inZoneData, zoneField, inValueRaster,
                                      outTable)

                # Join the zonal statistic to the Map Units Dissolve table
                fieldName = term + "_" + season + "_noPJ"
                ccslib.JoinMeanToTable(inZoneData, outTable, zoneField,
                                       fieldName)

                # Overwrite Projected seasonal local scale scores
                overwrite_field = ccsStandard.CreditTerms[1] + "_" + season
                with arcpy.da.UpdateCursor(
                        feature, [fieldName, overwrite_field]) as cursor:
                    for row in cursor:
                        row[1] = row[0]
                        cursor.updateRow(row)

                # Clean up
                arcpy.DeleteField_management(feature, fieldName)

        # Clean up
        arcpy.SelectLayerByAttribute_management(feature, "CLEAR_SELECTION")
        arcpy.Delete_management("lyr")

    # Add transect field to Map_Units_Dissolve
    fields = ["Transects"]
    fieldTypes = ["SHORT"]
    ccslib.AddFields(Map_Units_Dissolve, fields, fieldTypes)

    # Export data to Excel
    input_Tables = [
        MAP_UNITS_DISSOLVE, CURRENT_MGMT_CAT, CURRENT_WMZ, CURRENT_PMU,
        CURRENT_PRECIP
    ]
    for table in input_Tables:
        ccslib.ExportToExcel(table, Project_Folder, Project_Name)

    # Clean up
    arcpy.Delete_management("in_memory")

    # Save map document (Pro vs. ArcMap API split)
    if arcpy.ListInstallations()[0] == 'arcgispro':
        p = arcpy.mp.ArcGISProject("CURRENT")
        p.save()
    else:
        mxd = arcpy.mapping.MapDocument("CURRENT")
        mxd.save()
# Update the attribute table with the GlobalID before updating the domain with arcpy.da.SearchCursor(domTable, ['GlobalID']) as cursor: for row in cursor: RowID = str(row[0]) del row del cursor cursor = arcpy.da.UpdateCursor(domTable,[codeField]) for row in cursor: row[0]=RowID cursor.updateRow(row) del cursor, row msg="Update {} domain".format(TbleText) arcpy.AddMessage(msg) try: arcpy.TableToDomain_management(domTable, codeField, descField, wrkspc, domName, domDesc, updateOp) arcpy.SortCodedValueDomain_management(wrkspc, domName, "DESCRIPTION", "ASCENDING") except: msg="Unable to update {} domain".format(TbleText) arcpy.AddWarning(msg) pass else: msg="No {} to update".format(TbleText) arcpy.AddMessage(msg) else: arcpy.AddMessage("You have not provided a valid Subject Name") if LeadAgency: #Set local parameters LeadInfo = path.join(wrkspc,"Lead_Agency")
try: if arcpy.Exists(gdb): print "deleting old db" arcpy.Delete_management(gdb) logger.logGPMsg() print "creating geodatabase" arcpy.CreateFileGDB_management(out_folder, dbName) logger.logGPMsg() print "creating coded value domains" for d in domains: print d arcpy.TableToDomain_management(domainsXLS + "\\" + d + "$", "CODE", "CODE", gdb, "roadkill_" + d, "roadkill_" + d) logger.logGPMsg() print "creating xyphoid range domain" arcpy.CreateDomain_management(gdb, xyphoidDomainName, xyphoidDomainName, "SHORT", "RANGE") logger.logGPMsg() arcpy.SetValueForRangeDomain_management(gdb, xyphoidDomainName, xyphoidMin, xyphoidMax) logger.logGPMsg() print "creating Reports fc" srPath = os.path.join( arcpy.GetInstallInfo()["InstallDir"], r"Coordinate Systems\Projected Coordinate Systems\UTM\NAD 1983\NAD 1983 UTM Zone 12N.prj"
import arcpy
from arcpy import env
import sys  # needed by the generic exception handler below

try:
    # Set local parameters.
    domTable = arcpy.GetParameterAsText(0)
    codeField = arcpy.GetParameterAsText(1)
    # NOTE(review): descField reads the SAME tool parameter as codeField
    # (index 1).  Elsewhere in this project domains are built with
    # code == description, so this may be intentional — confirm against the
    # tool's parameter list before changing it.
    descField = arcpy.GetParameterAsText(1)
    dWorkspace = arcpy.GetParameterAsText(2)
    domName = "Def_POS_Description"
    updateOption = "REPLACE"

    # Process: Create a domain from an existing table.
    arcpy.TableToDomain_management(domTable, codeField, descField, dWorkspace,
                                   domName, update_option=updateOption)

    # Hand the workspace back as the tool's derived output (parameter 3).
    arcpy.SetParameter(3, dWorkspace)

except arcpy.ExecuteError:
    # Geoprocessing failure: surface the tool messages as an error.
    msgs = arcpy.GetMessages()
    arcpy.AddError(msgs)
    #print msgs #UPDATE
    print(msgs)

except:
    # Any other failure: report the line number where it happened.
    # (Fix: `sys` was used here without being imported, which turned every
    # unexpected error into a NameError.)
    tb = sys.exc_info()[2]
    #print "Line %i" % tb.tb_lineno #UPDATE
    print("Line %i" % tb.tb_lineno)
def createUN(jsonFile, outGDB):
    """Stage a complete ArcGIS Utility Network from a JSON configuration.

    Reads a GBK-encoded JSON config describing the network name, field
    domains, domain networks, terminal configurations, categories, network
    attributes, subtypes, asset-level settings (from a companion CSV) and
    tiers, then builds a new file geodatabase at ``outGDB`` and applies each
    group of settings in dependency order.

    Parameters:
        jsonFile: path to the GBK-encoded JSON configuration file.
        outGDB:   path of the file geodatabase to create.
    """
    cfgStr = open(jsonFile, 'r', encoding='gbk').read()
    unObj = json.loads(cfgStr)
    unName = unObj["unName"]

    # Create the UN and the structure network.
    arcpy.env.preserveGlobalIds = True
    arcpy.env.overwriteOutput = True
    arcpy.CreateFileGDB_management(os.path.dirname(outGDB),
                                   os.path.basename(outGDB))
    arcpy.pt.StageUtilityNetwork(outGDB, unObj["territoryFeaCls"],
                                 unObj["feaDS"], unName)

    # Tip: prefer relative paths; switch to absolute only for tools that
    # do not support them.
    arcpy.env.workspace = os.path.join(outGDB, unObj["feaDS"])

    # Import fieldDomains; domains are GDB-level physical settings.
    for domain in unObj["fieldDomains"]:
        dName = domain["name"]
        if domain.get("dtype") == "RANGE":
            arcpy.CreateDomain_management(outGDB, dName,
                                          domain.get("desc", dName),
                                          domain.get("ftype", "SHORT"),
                                          "RANGE")
            arcpy.SetValueForRangeDomain_management(outGDB, dName,
                                                    domain['min'],
                                                    domain['max'])
            continue
        # Coded-value domain: build an in-memory table of code/name pairs,
        # convert it to a domain, then discard the table.
        table = arcpy.management.CreateTable('in_memory', dName)[0]  # ?[0]
        arcpy.AddField_management(table, 'code', domain.get("ftype", "SHORT"))
        arcpy.AddField_management(table, 'name', 'TEXT', field_length=254)
        with arcpy.da.InsertCursor(table, ('code', 'name')) as cur:
            for v in domain["values"]:
                cur.insertRow((v["code"], v["name"]))
        arcpy.TableToDomain_management(table, 'code', 'name', outGDB, dName,
                                       domain.get("desc", dName),
                                       update_option='REPLACE')
        arcpy.Delete_management(table)

    # Create every domain network except "structure".
    for dnObj in unObj["domainNetworks"]:
        if dnObj["name"].lower() != "structure":
            arcpy.AddDomainNetwork_un(unName, dnObj["name"], dnObj["tierDef"],
                                      dnObj["controllerType"],
                                      dnObj.get("alias"))

    # Add TerminalConfiguration, categories, netAttributes — logical
    # settings scoped to the whole UN.
    # Tip: at least one domain network must exist before a
    # TerminalConfiguration can be added.
    terminalConfigs = unObj.get("terminalConfigs")
    if terminalConfigs:
        for terminalCfg in terminalConfigs:
            if terminalCfg["dir"] == "DIRECTIONAL":
                arcpy.AddTerminalConfiguration_un(
                    unName, terminalCfg["name"], "DIRECTIONAL",
                    terminals_directional=terminalCfg["terminals"],
                    valid_paths=terminalCfg["paths"],
                    default_path=terminalCfg.get("default"))
            else:
                arcpy.AddTerminalConfiguration_un(
                    unName, terminalCfg["name"], "BIDIRECTIONAL",
                    terminals_bidirectional=terminalCfg["terminals"],
                    valid_paths=terminalCfg["paths"],
                    default_path=terminalCfg.get("default"))

    # TODO: what is the difference between network grouping and layering?
    categories = unObj.get("categories")
    if categories:  ## why is this guard needed?
        for category in categories:
            arcpy.AddNetworkCategory_un(unName, category)

    # TODO: what do the optional network-attribute settings do?
    netAttributes = unObj.get("netAttributes")
    if netAttributes:
        for attrib in netAttributes:
            arcpy.AddNetworkAttribute_un(unName, attrib["name"],
                                         attrib["type"],
                                         attrib.get("inline"),
                                         attrib.get("apportionable"),
                                         attrib.get("domain"),
                                         attrib.get("overridable"),
                                         attrib.get("nullable"),
                                         attrib.get("substitution"),
                                         attrib.get("attrToSubstitution"))

    # Add subtypes, create new fields, assign domains and network
    # attributes — physical settings scoped to individual tables.
    for dnObj in unObj["domainNetworks"]:
        # Subtypes are already keyed to the ASSETGROUP field; add the
        # custom values from the config.
        subtypes = dnObj.get("subtypes")
        if subtypes:
            for subtype in subtypes:
                for v in subtype["values"]:
                    arcpy.AddSubtype_management(subtype["feaCls"], v["code"],
                                                v["name"])
                if subtype.get("default"):
                    arcpy.SetDefaultSubtype_management(subtype["feaCls"],
                                                       subtype.get("default"))
        # Add custom fields.
        newFields = dnObj.get("newFields")
        if newFields:
            for field in newFields:
                # Only TEXT fields carry a length.
                length = field.get(
                    "length") if field["type"].upper() == "TEXT" else None
                arcpy.AddField_management(field["feaCls"], field["name"],
                                          field["type"], field_length=length,
                                          field_alias=field.get("alias"))
        # Assign attribute domains to fields.
        fDomains = dnObj.get("fieldDomains")
        if fDomains:
            for fd in fDomains:
                arcpy.AssignDomainToField_management(fd["feaCls"],
                                                     fd["fieldName"],
                                                     fd["domainName"],
                                                     fd.get("subtypeCodes"))
                if fd.get("default"):
                    arcpy.AssignDefaultToField_management(
                        fd["feaCls"], fd["fieldName"], fd["default"],
                        fd.get("subtypeCodes"))
        # Bind network attributes to fields (skip fields that don't exist).
        netAttributes = dnObj.get("netAttributes")
        if netAttributes:
            for attribute in netAttributes:
                for field in attribute["fields"]:
                    fc, fName = field.split("/")
                    fObj = arcpy.ListFields(fc, fName)
                    if fObj:
                        arcpy.SetNetworkAttribute_un(unName,
                                                     attribute["name"],
                                                     dnObj["name"], fc, fName)

    # Apply per-asset configuration: terminal configuration, categories,
    # edge connectivity and association roles — logical settings scoped to
    # individual assets, driven by a companion CSV.
    with open(unObj.get("assetsCSV", "not exist"), 'r',
              encoding='gbk') as fp:
        reader = csv.reader(fp)  # read rows as lists
        header = next(
            reader
        )  # ['domainNet', 'feaCls', 'assetName', 'categories', 'terminalCfg', 'edgeConnectivity', 'roleType', 'deletionType', 'viewScale', 'splitType']
        assetCfg = namedtuple('assetCfg', header)
        for row in reader:
            row = assetCfg(*row)
            # assetName is "<asset group>/<asset type>".
            asset = row.assetName.split('/')
            if row.terminalCfg:
                arcpy.SetTerminalConfiguration_un(unName, row.domainNet,
                                                  row.feaCls, *asset,
                                                  row.terminalCfg)
            if row.categories:
                arcpy.SetNetworkCategory_un(unName, row.domainNet, row.feaCls,
                                            *asset, row.categories)
            if row.edgeConnectivity:  # edge connectivity is non-empty
                arcpy.SetEdgeConnectivity_un(unName, row.domainNet,
                                             row.feaCls, *asset,
                                             row.edgeConnectivity)
            if row.roleType:
                arcpy.SetAssociationRole_un(unName, row.domainNet, row.feaCls,
                                            *asset, row.roleType,
                                            row.deletionType, row.viewScale,
                                            row.splitType)

    # Create tiers and set subnetwork definitions — logical settings
    # scoped to subnetworks.
    # TODO: what does subnetwork_field_name do? SetSubnetworkDefinition has
    # many more optional settings.
    for dnObj in unObj["domainNetworks"]:
        dnName = dnObj["name"]
        if dnName.lower() != "structure":
            # tierGroups
            tierGroups = dnObj.get("tierGroups")
            if tierGroups and dnObj["tierDef"] == "HIERARCHICAL":
                for groupName in tierGroups:
                    arcpy.AddTierGroup_un(unName, dnName, groupName)
            tiers = dnObj.get("tiers")
            if tiers:
                for tier in tiers:
                    if dnObj["tierDef"] == "HIERARCHICAL":
                        arcpy.AddTier_un(
                            unName, dnName, tier["name"], tier["rank"],
                            topology_type="MESH",
                            tier_group_name=tier.get("groupName"),
                            subnetwork_field_name=tier["subnetField"])
                    else:
                        arcpy.AddTier_un(unName, dnName, tier["name"],
                                         tier["rank"],
                                         topology_type=tier["topo"])
                    arcpy.SetSubnetworkDefinition_un(
                        unName, dnName, tier["name"], tier["disjoint"],
                        tier["devices"], tier["controllers"],
                        tier.get("lines"), tier.get("aggregated"),
                        tier.get("diagrams"),
                        include_barriers=tier.get("barriers"),
                        traversability_scope=tier.get("traverse"))

    # TODO: import rules
    # NOTE(review): hard-coded absolute path — should come from the config.
    arcpy.ImportRules_un(unName, "All", "E:/ArcGIS/unnet/rules.csv")

    # TODO: import data
    # Data import is subtype-driven: each subtype value carries the source
    # feature-class path to append.  (This modifies the domain network's
    # subtype values.)
    for dnObj in unObj["domainNetworks"]:
        subtypes = dnObj.get("subtypes")
        if subtypes:
            for subtype in subtypes:
                for v in subtype["values"]:
                    arcpy.Append_management(subtype["path"],
                                            subtype["feaCls"], "NO_TEST",
                                            subtype=v["name"])
for workspace in workspaces: #name = arcpy.Describe(workspace).name # Set local variables # in_table = workspace + "\\DataSources" code_field = "DataSources_ID" description_field = "Source" in_workspace = workspace domain_name = "d_DataSources" domain_description = "Data Sources" update_option = "APPEND" try: print "Updating " + domain_name +". . ." # Process: Append the feature classes into the empty feature class arcpy.TableToDomain_management (in_table, code_field, description_field, in_workspace, domain_name, domain_description, update_option) except: # If an error occurred while running a tool print the messages print arcpy.GetMessages() print "d_DataSources domain update completed. . ."
row.Area_Name = xd rows.insertRow(row) del rows del row domTable = fc1 codeField = "Area_Name" descField = "Area_Name" dWorkspace = workspc domName = "Area_Names" domDesc = "Search area names" # Process: Create a domain from an existing table arcpy.TableToDomain_management(domTable, codeField, descField, dWorkspace, domName, domDesc,"REPLACE") del fc1 ##except: ## # Get the tool error messages ## # ## msgs = "All tasks have been processed" ## ## # Return tool error messages for use with a script tool ## # ## arcpy.AddWarning(msgs) ## # Print tool error messages for use in Python/PythonWin
Operation_Period = "Operation_Period" Incident_Staff = "Incident_Staff" Teams = "Teams" TeamMemb = "Team_Members" Subject_Information = "Subject_Information" Scenarios = "Scenarios" Probability_Regions = "Probability_Regions" Assignments = "Assignments" Clues_Point = "Clues_Point" # Process: Table To Domain (1) cLead = arcpy.GetCount_management(Lead_Agency) if int(cLead.getOutput(0)) > 0: arcpy.AddMessage("update Lead Agency domain") arcpy.TableToDomain_management(Lead_Agency, "Lead_Agency", "Lead_Agency", Workspace, "Lead_Agency", "Lead_Agency", "REPLACE") try: arcpy.SortCodedValueDomain_management(Workspace, "Lead_Agency", "DESCRIPTION", "ASCENDING") except: pass else: arcpy.AddMessage("No Lead Agency information to update") # Process: Table To Domain (2) cIncident = arcpy.GetCount_management(Incident_Information) if int(cIncident.getOutput(0)) > 0: arcpy.AddMessage("update Incident Information domain") arcpy.TableToDomain_management(Incident_Information, "Incident_Name", "Incident_Name", Workspace, "Incident_Name",
if domainTables: # describe present domains, estrablish and apply if needed desc = arcpy.Describe(watershedGDB_path) listOfDomains = [] domains = desc.Domains for domain in domains: listOfDomains.append(domain) del desc, domains if not "Reach_Domain" in listOfDomains: arcpy.TableToDomain_management(ID_Table, "IDENT", "ID_DESC", watershedGDB_path, "Reach_Domain", "Reach_Domain", "REPLACE") if not "Type_Domain" in listOfDomains: arcpy.TableToDomain_management(Reach_Table, "TYPE", "TYPE", watershedGDB_path, "Type_Domain", "Type_Domain", "REPLACE") del listOfDomains, ID_Table, Reach_Table, domainTables # Assign domain to flow length fields for User Edits... arcpy.AssignDomainToField_management(Flow_Length, "Reach", "Reach_Domain", "") arcpy.AssignDomainToField_management(Flow_Length, "TYPE",
import os
import xlrd
import arcpy

dirname = os.path.dirname(__file__)

# Domains Table / Excel file
domainsXLS = os.path.join(dirname, 'Domains_Sample.xlsx')

# Fields sheet, addressed with the "<workbook>\<sheet>$" syntax arcpy
# expects for Excel inputs.  (Fix: was written "\F" — same runtime string,
# but only by luck, since \F happens not to be an escape sequence.)
in_table = domainsXLS + "\\Fields_List$"

if __name__ == "__main__":
    # Open the workbook and the sheet listing one field per column.
    workbook = xlrd.open_workbook(domainsXLS)
    worksheet = workbook.sheet_by_name("Fields_List")

    # One domain per worksheet COLUMN, starting at column 1 (column 0 is
    # skipped, as in the original).  (Fix: the loop counters were named
    # num_rows/curr_row although they index columns; the unused `fields`
    # list has been removed.)
    for col_idx in range(1, worksheet.ncols):
        # The column header (row 0) names both the field and its domain.
        field_name = worksheet.col_values(col_idx)[0].strip()

        # Create (or replace) the domain in the enterprise geodatabase;
        # the same column supplies both codes and descriptions.
        arcpy.TableToDomain_management(in_table=in_table,
                                       code_field=field_name,
                                       description_field=field_name,
                                       in_workspace="Database Connections\\regis.sde",
                                       domain_name=field_name,
                                       domain_description=field_name,
                                       update_option="REPLACE")

        # Assign the domain to the matching field of the Parcelle table.
        arcpy.AssignDomainToField_management(in_table="Database Connections\\regis.sde\\regis.dbo.Parcelle",
                                             field_name=field_name,
                                             domain_name=field_name)
print "Domains aleady exist" # If LookUpTable Name does not exist in ESRI domain names else: # A list that holds all fields that have a SHORT INT data type intList = [] # If not in the FLOAT and INT lists if lu not in intList: print "DOUBLE - " + lu # Convert the specific Oracle LookUp table to an ESRI domain arcpy.TableToDomain_management(lu, secondField, Desc, dWorkspace, lu, "", "REPLACE") # If in INT lists else: # Manually add Domain if Table field is a SHORT INT for int2 in intList: if int2 == lu: print "Short Int: " + lu # Create Domain arcpy.CreateDomain_management(dWorkspace2, lu, Desc2, "SHORT", "CODED") # List all fields in the current table