# Note: the functions below assume module-level imports (arcpy, json, os) plus the local
# dla helper module and other module-level helpers (_chunkSize, getOIDs, sendRequest, etc.)
# defined elsewhere in this package.

def calcValue(row, names, calcString):
    # calculate a value based on source fields and/or other expressions
    outVal = ""
    calcList = calcString.split("|")
    for strVal in calcList:
        if strVal in names:
            try:
                fidx = names.index(strVal)
                if str(row[fidx]) != row[fidx]:
                    outVal += str(row[fidx])
                else:
                    outVal += '"' + str(row[fidx]) + '"'
            except:
                outVal += strVal
        else:
            outVal += strVal
    if len(calcList) == 1 and outVal == '':
        outVal = calcList[0]
    try:
        if outVal != "" and outVal != None:
            outVal = eval(outVal)
    except:
        dla.addMessage("Error evaluating:" + outVal)
        dla.showTraceback()
        dla.addError("Error calculating field values:" + outVal)
        outVal = None
    return outVal
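# Hedged usage sketch (not part of the original module): illustrates the pipe-delimited
# expression format that calcValue() evaluates. Field names wrapped in "|" are replaced
# with the row value (re-quoted when the value is a string) and the assembled text is
# passed to eval(). The field names and values below are invented for illustration.
def _calc_value_sketch():
    names = ["TRAILNAME", "MILES"]          # hypothetical source field names
    row = ["Ridge Trail", 3.2]              # hypothetical row values
    calcString = "|TRAILNAME| + ' - ' + str(|MILES|) + ' miles'"
    outVal = ""
    for strVal in calcString.split("|"):
        if strVal in names:
            value = row[names.index(strVal)]
            # strings are re-quoted so eval() below sees a string literal
            outVal += str(value) if str(value) != value else '"' + str(value) + '"'
        else:
            outVal += strVal
    return eval(outVal)                     # -> "Ridge Trail - 3.2 miles"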
def writeDataSample(xmlDoc, root, sourceFields, sourcePath, rowLimit):
    # get a subset of data for preview and other purposes
    i = 0
    data = xmlDoc.createElement("Data")
    root.appendChild(data)
    cursor = arcpy.da.SearchCursor(sourcePath, sourceFields)
    i = 0
    #dla.addMessage(str(sourceFields))
    for row in cursor:
        if i == 10:  # preview is capped at 10 rows; the rowLimit parameter is not used here
            return
        xrow = xmlDoc.createElement("Row")
        for f in range(0, len(sourceFields)):
            try:
                xrow.setAttribute(sourceFields[f], str(row[f]))  # handles numeric values and simple strings
            except:
                try:
                    attrval = row[f].encode('utf-8', errors='replace').decode('utf-8', errors='backslashreplace')  # handles non-utf-8 codes
                    xrow.setAttribute(sourceFields[f], attrval)
                except:
                    dla.showTraceback()
                    pass  # backslashreplace should never throw a unicode decode error...
        data.appendChild(xrow)
        i += 1
    del cursor
def writeDataSample(xmlDoc, root, sourceFields, sourcePath, rowLimit):
    # get a subset of data for preview and other purposes
    i = 0
    data = xmlDoc.createElement("Data")
    root.appendChild(data)
    #if sourcePath.endswith('.lyrx'):
    #    desc = arcpy.Describe(sourcePath) # dataset path/source as parameter
    #    fields = desc.fields
    #    sourceFields = [field.name for field in fields]
    cursor = arcpy.da.SearchCursor(sourcePath, sourceFields)
    i = 0
    prefixes = []
    for row in cursor:
        if i == 10:
            return
        xrow = xmlDoc.createElement("Row")
        for f in range(0, len(sourceFields)):
            try:
                xrow.setAttribute(sourceFields[f], str(row[f]))  # handles numeric values and simple strings
            except:
                try:
                    attrval = row[f].encode('utf-8', errors='replace').decode('utf-8', errors='backslashreplace')  # handles non-utf-8 codes
                    xrow.setAttribute(sourceFields[f], attrval)
                except:
                    dla.showTraceback()
                    pass  # backslashreplace should never throw a unicode decode error...
        data.appendChild(xrow)
        i += 1
    del cursor
def deleteFeatures(sourceLayer, targelUrl, expr):
    # delete features using chunks of _chunkSize
    retval = False
    error = False
    # delete section
    ids = getOIDs(targelUrl, expr)
    try:
        lenDeleted = 100
        #Chunk deletes using chunk size at a time
        featuresProcessed = 0
        numFeat = len(ids)
        if numFeat == 0:
            dla.addMessage("0 Features to Delete, exiting")
            return True  # nothing to delete is OK
        if numFeat > _chunkSize:
            chunk = _chunkSize
        else:
            chunk = numFeat
        arcpy.SetProgressor("default", "Deleting Features")
        while featuresProcessed < numFeat and error == False:
            #Chunk deletes using chunk size at a time
            next = featuresProcessed + chunk
            msg = "Deleting features " + str(featuresProcessed) + ":" + str(next)
            dla.addMessage(msg)
            arcpy.SetProgressorLabel(msg)
            oids = ",".join(str(e) for e in ids[featuresProcessed:next])
            url = targelUrl + '/deleteFeatures'
            token = dla.getSigninToken()
            params = {'f': 'pjson', 'objectIds': oids, 'token': token}
            result = dla.sendRequest(url, params)
            try:
                if result['error'] != None:
                    retval = False
                    dla.addMessage("Delete features from Feature Service failed")
                    dla.addMessage(json.dumps(result))
                    error = True
            except:
                try:
                    lenDeleted = len(result['deleteResults'])
                    msg = str(lenDeleted) + " features deleted, " + str(featuresProcessed + chunk) + "/" + str(numFeat)
                    print(msg)
                    dla.addMessage(msg)
                    retval = True
                except:
                    retval = False
                    error = True
                    dla.showTraceback()
                    dla.addMessage("Delete features from Feature Service failed")
                    dla.addError(json.dumps(result))
            featuresProcessed += chunk
    except:
        retval = False
        error = True
        dla.showTraceback()
        dla.addMessage("Delete features from Feature Service failed")
        pass
    return retval
def addFeatures(sourceLayer, targelUrl, expr):
    # add features using _chunkSize
    retval = False
    error = False
    # add section
    try:
        arcpy.SetProgressor("default", "Adding Features")
        arcpy.SetProgressorLabel("Adding Features")
        featurejs = featureclass_to_json(sourceLayer)
        url = targelUrl + '/addFeatures'
        numFeat = len(featurejs['features'])
        if numFeat == 0:
            dla.addMessage("0 Features to Add, exiting")
            return True  # nothing to add is OK
        if numFeat > _chunkSize:
            chunk = _chunkSize
        else:
            chunk = numFeat
        featuresProcessed = 0
        while featuresProcessed < numFeat and error == False:
            next = featuresProcessed + chunk
            features = featurejs['features'][featuresProcessed:next]
            msg = "Adding features " + str(featuresProcessed) + ":" + str(next)
            dla.addMessage(msg)
            arcpy.SetProgressorLabel(msg)
            token = dla.getSigninToken()
            params = {'rollbackonfailure': 'true', 'f': 'json', 'token': token, 'features': json.dumps(features)}
            result = dla.sendRequest(url, params)
            try:
                if result['error'] != None:
                    retval = False
                    dla.addMessage("Add features to Feature Service failed")
                    dla.addMessage(json.dumps(result))
                    error = True
            except:
                try:
                    lenAdded = len(result['addResults'])
                    msg = str(lenAdded) + " features added, " + str(featuresProcessed + chunk) + "/" + str(numFeat)
                    print(msg)
                    dla.addMessage(msg)
                    retval = True
                except:
                    retval = False
                    dla.addMessage("Add features to Feature Service failed")
                    dla.showTraceback()
                    dla.addError(json.dumps(result))
                    error = True
            featuresProcessed += chunk
    except:
        retval = False
        dla.addMessage("Add features to Feature Service failed")
        dla.showTraceback()
        error = True
        pass
    return retval
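# Hedged sketch (not part of the original module): the chunking pattern that
# deleteFeatures()/addFeatures() use when posting edits to a feature service REST
# endpoint, shown in isolation. The chunk size and feature list here are placeholders;
# the real functions build request params and call dla.sendRequest() with a token.
def _chunk_sketch(features, chunk_size=250):
    processed = 0
    while processed < len(features):
        batch = features[processed:processed + chunk_size]
        # the real code POSTs each batch to <serviceUrl>/addFeatures or /deleteFeatures
        # and checks 'addResults' / 'deleteResults' in the JSON response before continuing
        print("processing " + str(processed) + ":" + str(processed + len(batch)))
        processed += chunk_size
    return processed >= len(features)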
def getExpression(row, names, expression):
    calcNew = None
    try:
        calcNew = calcValue(row, names, expression)
        #setValue(row,targetName,sourceValue,calcNew,idx)
    except:
        err = "Exception caught: unable to set value for expression=" + expression
        dla.showTraceback()
        dla.addError(err)
        print(err)
        dla._errCount += 1
    return calcNew
def setSpatialReference(dataset, xmlDoc, desc, lyrtype):
    try:
        spref = str(desc.spatialReference.factoryCode)
        if spref == None or spref == '' or spref == '0':
            setSourceTarget(dataset, xmlDoc, lyrtype + "SpatialReference", desc.spatialReference.exportToString())
        else:
            setSourceTarget(dataset, xmlDoc, lyrtype + "FactoryCode", spref)
    except:
        try:
            setSourceTarget(dataset, xmlDoc, lyrtype + "SpatialReference", desc.spatialReference.exportToString())
        except:
            dla.showTraceback()
            arcpy.AddError("Could not set Spatial Reference for " + lyrtype + " Layer")
def getValueMap(row, names, sourceValue, field):
    # run value map function for a row
    valueMaps = field.getElementsByTagName("ValueMap")
    newValue = None
    found = False
    otherwise = None
    for valueMap in valueMaps:
        try:
            otherwise = valueMap.getElementsByTagName("Otherwise")[0]
            otherwise = dla.getTextValue(otherwise)
        except:
            otherwise = None
        sourceValues = []
        sourceValues = valueMap.getElementsByTagName("sValue")
        targetValues = []
        targetValues = valueMap.getElementsByTagName("tValue")
        i = 0
        for val in sourceValues:
            sValue = dla.getTextValue(val)
            try:
                sourceTest = float(sValue)
            except ValueError:
                sourceTest = str(sValue)
                if sourceTest == '':
                    sourceTest = None
            #if mapExpr and mapExpr != "":
            #    sourceValue = calcValue(row,names,mapExpr)
            if sourceValue == sourceTest or sourceValue == sValue:
                # this will check numeric and non-numeric equivalency for current values in value maps
                found = True
                try:
                    newValue = dla.getTextValue(targetValues[i])
                except:
                    dla._errCount += 1
                    success = False
                    err = "Unable to map values for " + sourceValue + ", value = " + str(newValue)
                    dla.showTraceback()
                    dla.addError(err)
                    print(err)
            i = i + 1
    if not found:
        if str(otherwise) != "None":
            newValue = otherwise
        else:
            dla._errCount += 1
            success = False
            # targetName is not defined in this scope; read it from the Field element instead
            err = "Unable to find map value (otherwise) for " + str(dla.getNodeValue(field, "TargetName")) + ", value = " + str(sourceValue)
            dla.addError(err)
    return newValue
def extract(xmlFileName, rowLimit, workspace, sourceLayer, targetFC):
    xmlDoc = dla.getXmlDoc(xmlFileName)
    if workspace == "" or workspace == "#" or workspace == None:
        dla.workspace = arcpy.env.scratchGDB
    else:
        dla.workspace = workspace
    fields = dla.getFields(xmlFileName)
    success = True
    name = ''
    try:
        if not arcpy.Exists(dla.workspace):
            dla.addMessage(dla.workspace + " does not exist, attempting to create")
            dla.createGeodatabase()
        if len(fields) > 0:
            arcpy.SetProgressor("step", "Importing Layer...", 0, 1, 1)
            if sourceLayer == '' or sourceLayer == '#':
                source = dla.getNodeValue(xmlDoc, "Datasets/Source")
            else:
                source = sourceLayer
            if targetFC == '' or targetFC == '#':
                targetName = dla.getTargetName(xmlDoc)
            else:
                targetName = targetFC[targetFC.rfind(os.sep) + 1:]
            sourceName = dla.getSourceName(xmlDoc)
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " + targetName + "...")
            #if not arcpy.Exists(sourceLayer):
            #    dla.addError("Layer " + sourceLayer + " does not exist, exiting")
            #    return
            retVal = exportDataset(xmlDoc, sourceLayer, dla.workspace, targetName, rowLimit)
            if retVal == False:
                success = False
        arcpy.SetProgressorPosition()
    except:
        dla.addError("A Fatal Error occurred")
        dla.showTraceback()
        success = False
    finally:
        arcpy.ResetProgressor()
        #arcpy.RefreshCatalog(dla.workspace)
        arcpy.ClearWorkspaceCache_management(dla.workspace)
    return success
def setSpatialReference(dataset, xmlDoc, desc, lyrtype):
    if desc.datasetType.lower() == 'table':
        return
    try:
        spref = str(desc.spatialReference.factoryCode)
        if spref == None or spref == '' or spref == '0':
            setSourceTarget(dataset, xmlDoc, lyrtype + "SpatialReference", desc.spatialReference.exportToString())
        else:
            setSourceTarget(dataset, xmlDoc, lyrtype + "FactoryCode", spref)
    except:
        try:
            setSourceTarget(dataset, xmlDoc, lyrtype + "SpatialReference", desc.spatialReference.exportToString())
        except:
            dla.showTraceback()
            arcpy.AddError("Could not set Spatial Reference for " + lyrtype + " Layer")
def test6():
    dla._project = arcpy.mp.ArcGISProject(r"C:\Users\Steve\Documents\ArcGIS\Projects\Trails\Trails.aprx")
    layer = "Trails"
    try:
        desc = arcpy.Describe(layer)  # never works in scripts
    except:
        arcpy.AddMessage("Describe error")
        dla.showTraceback()
    layer = dla.getLayer("Trails")  # loop through maps/layers to find matching name
    if layer != None and layer.supports("DataSource"):
        try:
            arcpy.AddMessage(layer.dataSource)
        except:
            arcpy.AddMessage("Print error")
def getValue(ftype, flength, targetName, targetValue, val):
    retVal = val  # init to the value calculated so far. This function will alter as needed for field type
    try:
        if retVal == 'None':
            retVal = None
        if retVal != targetValue:
            if ftype == 'Integer' or ftype == 'Double':
                # if the type is numeric then try to cast to float
                if str(val) == 'None' or str(val) == dla._noneFieldName:
                    retVal = None
                else:
                    try:
                        valTest = float(val)
                        retVal = val
                    except:
                        err = "Exception caught: unable to cast " + targetName + " to " + ftype + " : '" + str(val) + "'"
                        dla.addError(err)
                        dla._errCount += 1
            elif ftype == 'String':
                # if a string then cast to string or encode utf-8
                if isinstance(val, str):  # fixed: original compared type(val) to the string 'str'
                    retVal = val.encode('utf-8', errors='replace').decode('utf-8', errors='backslashreplace')  # handle unicode
                else:
                    retVal = str(val)
                # check length to make sure it is not too long.
                if len(retVal) > int(flength):
                    err = "Exception caught: value length > field length for " + targetName + "(Length " + str(flength) + ") : '" + str(retVal) + "'"
                    dla.addError(err)
                    dla._errCount += 1
            else:
                retVal = val
    except:
        err = "Exception caught: unable to get value for value=" + str(val) + " fieldname=" + targetName
        dla.showTraceback()
        dla.addError(err)
        dla._errCount += 1
    return retVal
def extract(xmlFileName, rowLimit, workspace, source, target, datasetType):
    xmlDoc = dla.getXmlDoc(xmlFileName)
    if workspace == "" or workspace == "#" or workspace == None:
        dla.workspace = dla.setWorkspace()
    else:
        dla.workspace = workspace
    fields = dla.getFields(xmlFileName)
    success = True
    name = ''
    try:
        if not arcpy.Exists(dla.workspace):
            dla.addMessage(dla.workspace + " does not exist, attempting to create")
            dla.createGeodatabase()
        if len(fields) > 0:
            arcpy.SetProgressor("step", "Importing Layer...", 0, 1, 1)
            targetName = dla.getDatasetName(target)
            sourceName = dla.getDatasetName(source)
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " + targetName + "...")
            if not arcpy.Exists(source):
                dla.addError("Layer " + source + " does not exist, exiting")
                return
            retVal = exportDataset(xmlDoc, source, dla.workspace, targetName, rowLimit, datasetType)
            if retVal == False:
                success = False
        arcpy.SetProgressorPosition()
    except:
        dla.addError("A Fatal Error occurred")
        dla.showTraceback()
        success = False
    finally:
        arcpy.ResetProgressor()
        #arcpy.RefreshCatalog(dla.workspace)
        arcpy.ClearWorkspaceCache_management(dla.workspace)
    return success
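# Hedged usage sketch (not part of the original module): how this extract() overload
# might be called from a script tool. The config path, workspace, and dataset paths
# below are placeholders; running it requires an ArcGIS environment with the dla
# helper module on the path.
def _extract_usage_sketch():
    ok = extract(r"C:\configs\Trails.xml",      # field-mapping config file (placeholder)
                 None,                          # rowLimit: None loads all rows
                 r"C:\scratch\staging.gdb",     # intermediate workspace (placeholder)
                 r"C:\data\source.gdb\Trails",  # source dataset (placeholder)
                 r"C:\data\target.gdb\Trails",  # target dataset (placeholder)
                 "FeatureClass")                # datasetType ('Table' or a feature class type)
    if not ok:
        arcpy.AddError("Extract failed")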
def theProjectWay():
    """ This function is currently not used. It is an alternative to the create feature class/append
    approach currently being used. It is slower because the entire dataset is projected first, and it
    is less straightforward because it adds the transform method that Append seems to know how to
    handle already. It is better though because it will actually raise trappable errors while Append
    fails silently... The solution in the other function is to count the resulting records and report issues.
    """
    if targetRef != '':
        if arcpy.Exists(targetName):
            arcpy.Delete_management(targetName)
        inttable = workspace + os.sep + targetName + "_prj"
        arcpy.env.workspace = workspace
        xform = None
        desc = arcpy.Describe(sourceLayer)
        xforms = arcpy.ListTransformations(desc.spatialReference, targetRef, desc.extent)
        #if sourceRef.exportToString().find("NAD_1983") > -1 and targetRef.exportToString().find("WGS_1984") > -1:
        xform = xforms[0]
        #for xform in xforms:
        dla.addMessage("Transform: " + xform)
        try:
            res = arcpy.Project_management(sourceLayer, inttable, out_coor_system=targetRef, transform_method=xform)
        except:
            dla.showTraceback()
            err = "Unable to project the data to the target spatial reference, please check settings and try projecting manually in ArcGIS"
            dla.addError(err)
            return False
        dla.addMessage("Features projected")
        view = dla.makeFeatureViewForLayer(dla.workspace, inttable, viewName, whereClause, xmlFields)
        dla.addMessage("View Created")
        #except:
        #    arcpy.AddError("Unable to create feature View " + viewName)
        count = arcpy.GetCount_management(view).getOutput(0)
        dla.addMessage(str(count) + " source rows")
        #sourceRef = getSpatialReference(xmlDoc,"Source")
        #res = arcpy.CreateFeatureclass_management(workspace,targetName,template=sourceLayer,spatial_reference=targetRef)
        res = arcpy.CopyFeatures_management(view, targetName)
        dla.addMessage("Features copied")
def setValue(row, names, types, lengths, targetName, targetValue, val):
    try:
        if val == 'None':
            val = None
        if val != targetValue:
            idx = names.index(targetName)
            if types[idx] == 'Integer' or types[idx] == 'Double':
                # if the type is numeric then try to cast to float
                try:
                    valTest = float(val)
                    row[idx] = val
                except:
                    err = "Exception caught: unable to cast " + targetName + " to " + types[idx] + " : '" + str(val) + "'"
                    dla.addError(err)
                    print(err)
                    dla._errCount += 1
            elif types[idx] == 'String':
                # if a string then cast to string and check length
                if not isinstance(val, str):  # fixed: original compared type(val) to the string 'str'
                    val = str(val)
                if len(val) > int(lengths[idx]):
                    err = "Exception caught: value length > field length for " + targetName + "(Length " + str(lengths[idx]) + ") : '" + str(val) + "'"
                    dla.addError(err)
                    print(err)
                    dla._errCount += 1
                else:
                    row[idx] = val
            else:
                row[idx] = val
    except:
        success = False
        err = "Exception caught: unable to set value for value=" + str(val) + " fieldname=" + targetName
        dla.showTraceback()
        dla.addError(err)
        print(err)
        dla._errCount += 1
def getValue(names, ftypes, lengths, targetName, targetValue, val):
    retVal = val  # init to the value calculated so far. This function will alter as needed for field type
    try:
        idx = names.index(targetName)
        if str(retVal) == 'None':
            retVal = None
        if str(retVal) != str(targetValue):
            if ftypes[idx] == 'Integer' or ftypes[idx] == 'Double' or ftypes[idx] == 'Float':
                # if the type is numeric then try to cast to float
                try:
                    valTest = float(val)
                    retVal = val
                except:
                    err = "Exception caught: unable to cast " + targetName + " to " + ftypes[idx] + " : '" + str(val) + "'"
                    dla.addError(err)
                    dla._errCount += 1
            elif ftypes[idx] == 'String':
                # if a string then cast to string or encode utf-8
                if isinstance(val, str):  # fixed: original compared type(val) to the string 'str'
                    retVal = val.encode('utf-8', errors='replace').decode('utf-8', errors='backslashreplace')  # handle unicode
                else:
                    retVal = str(val)
                # check length to make sure it is not too long.
                if len(retVal) > int(lengths[idx]):
                    err = "Exception caught: value length > field length for " + targetName + "(Length " + str(lengths[idx]) + ") : '" + str(retVal) + "'"
                    dla.addError(err)
                    dla._errCount += 1
            else:
                retVal = val
    except:
        err = "Exception caught: unable to get value for value=" + str(val) + " fieldname=" + targetName
        dla.showTraceback()
        dla.addError(err)
        dla._errCount += 1
    return retVal
def getDomainMap(row, sourceValue, field):
    # run domain map function for a row
    valueMaps = field.getElementsByTagName("DomainMap")
    newValue = sourceValue
    found = False
    otherwise = None
    for valueMap in valueMaps:
        sourceValues = []
        sourceValues = valueMap.getElementsByTagName("sValue")
        targetValues = []
        targetValues = valueMap.getElementsByTagName("tValue")
        i = 0
        for val in sourceValues:
            sValue = dla.getTextValue(val)
            try:
                sourceTest = float(sValue)
            except ValueError:
                sourceTest = str(sValue)
                if sourceTest == '' or sourceTest == 'None':
                    sourceTest = None
            if sourceValue == sourceTest or sourceValue == sValue:
                # this will check numeric and non-numeric equivalency for current values in maps
                found = True
                try:
                    newValue = dla.getTextValue(targetValues[i])
                except:
                    dla._errCount += 1
                    success = False
                    err = "Unable to map values for " + sourceValue + ", value = " + str(newValue)
                    dla.showTraceback()
                    dla.addError(err)
                    print(err)
            i = i + 1
    return newValue
def exportDataset(xmlDoc, source, workspace, targetName, rowLimit, datasetType):
    result = True
    xmlFields = xmlDoc.getElementsByTagName("Field")
    dla.addMessage("Exporting Data from " + source)
    whereClause = ""
    if rowLimit != None:
        whereClause = getObjectIdWhereClause(source, rowLimit)
    if whereClause != '' and whereClause != ' ':
        dla.addMessage("Where " + str(whereClause))
    sourceName = dla.getDatasetName(source)
    viewName = sourceName + "_View"
    dla.addMessage(viewName)
    targetRef = getSpatialReference(xmlDoc, "Target")
    sourceRef = getSpatialReference(xmlDoc, "Source")
    if datasetType == 'Table':
        isTable = True
    elif targetRef != '':
        isTable = False
    arcpy.env.workspace = workspace
    if source.lower().endswith('.lyrx') and not dla.hasJoin(source):
        view = dla.getLayerFromString(source)
    elif isTable:
        view = dla.makeTableView(dla.workspace, source, viewName, whereClause, xmlFields)
    elif not isTable:
        view = dla.makeFeatureView(dla.workspace, source, viewName, whereClause, xmlFields)
    dla.addMessage("View Created")
    srcCount = arcpy.GetCount_management(view).getOutput(0)
    dla.addMessage(str(srcCount) + " source rows")
    if str(srcCount) == '0':
        result = False
        dla.addError("Failed to extract " + sourceName + ", Nothing to export")
    else:
        arcpy.env.overwriteOutput = True
        ds = workspace + os.sep + targetName
        currentPreserveGlobalIDs = arcpy.env.preserveGlobalIds
        if dla.processGlobalIds(xmlDoc):  # both datasets have globalids in the correct workspace types
            arcpy.env.preserveGlobalIds = True  # try to preserve
            dla.addMessage("Attempting to preserve GlobalIDs")
        else:
            arcpy.env.preserveGlobalIds = False  # don't try to preserve
            dla.addMessage("Unable to preserve GlobalIDs")
        if isTable:
            arcpy.TableToTable_conversion(in_rows=view, out_path=workspace, out_name=targetName)
        else:
            spRefMatch = dla.compareSpatialRef(xmlDoc)
            currentRef = arcpy.env.outputCoordinateSystem  # grab current env settings
            currentTrans = arcpy.env.geographicTransformations
            if not spRefMatch:
                arcpy.env.outputCoordinateSystem = targetRef
                transformations = arcpy.ListTransformations(sourceRef, targetRef)
                transformations = ";".join(transformations)  # concat the values - format change for setting the values.
                arcpy.env.geographicTransformations = transformations
            arcpy.FeatureClassToFeatureClass_conversion(in_features=view, out_path=workspace, out_name=targetName)
            if not spRefMatch:  # set the spatial reference back
                arcpy.env.outputCoordinateSystem = currentRef
                arcpy.env.geographicTransformations = currentTrans
        arcpy.env.preserveGlobalIds = currentPreserveGlobalIDs
        removeDefaultValues(ds)  # don't want to turn nulls into defaultValues in the intermediate data
        # not needed if doing the transformations approach above...
        # if isTable:
        #     if not createDataset('Table',workspace,targetName,None,xmlDoc,source,None):
        #         arcpy.AddError("Unable to create intermediate table, exiting: " + workspace + os.sep + targetName)
        #         return False
        # elif not isTable:
        #     geomType = arcpy.Describe(source).shapeType
        #     if not createDataset('FeatureClass',workspace,targetName,geomType,xmlDoc,source,targetRef):
        #         arcpy.AddError("Unable to create intermediate feature class, exiting: " + workspace + os.sep + targetName)
        #         return False
        # fieldMap = getFieldMap(view,ds)
        # arcpy.Append_management(view,ds,schema_type="NO_TEST",field_mapping=fieldMap)
        dla.addMessage(arcpy.GetMessages(2))  # only serious errors
        count = arcpy.GetCount_management(ds).getOutput(0)
        dla.addMessage(str(count) + " source rows exported to " + targetName)
        if str(count) == '0':
            result = False
            dla.addError("Failed to load to " + targetName + ", it is likely that your data falls outside of the target Spatial Reference Extent or there is another basic issue")
            dla.addError("To verify please use the Append and/or Copy Features tool to load some data to an intermediate dataset:")
            dla.addError(ds)
            dla.showTraceback()
    return result
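# Hedged sketch (not part of the original module): the environment-based reprojection
# pattern used in exportDataset() above, shown in isolation. The parameters are
# placeholders supplied by the caller; assumes arcpy is available.
def _reproject_on_export_sketch(view, workspace, targetName, sourceRef, targetRef):
    savedRef = arcpy.env.outputCoordinateSystem       # remember current env settings
    savedTrans = arcpy.env.geographicTransformations
    try:
        arcpy.env.outputCoordinateSystem = targetRef
        # ListTransformations returns a list; the env setting expects a
        # semicolon-delimited string
        arcpy.env.geographicTransformations = ";".join(
            arcpy.ListTransformations(sourceRef, targetRef))
        arcpy.FeatureClassToFeatureClass_conversion(
            in_features=view, out_path=workspace, out_name=targetName)
    finally:
        arcpy.env.outputCoordinateSystem = savedRef   # always restore the environment
        arcpy.env.geographicTransformations = savedTrans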
def addRows(source, targetUrl, expr):
    # add rows using _chunkSize
    retval = False
    error = False
    # add section
    try:
        arcpy.SetProgressor("default", "Adding Rows")
        arcpy.SetProgressorLabel("Adding Rows")
        rowjs = rowsToJson(source)
        url = targetUrl + '/addFeatures'
        try:
            numFeat = len(rowjs['features'])
        except:
            numFeat = 0
        if numFeat == 0:
            dla.addMessage("0 Rows to Add, exiting")
            return True  # nothing to add is OK
        if numFeat > _chunkSize:
            chunk = _chunkSize
        else:
            chunk = numFeat
        rowsProcessed = 0
        while rowsProcessed < numFeat and error == False:
            next = rowsProcessed + chunk
            rows = rowjs['features'][rowsProcessed:next]
            msg = "Adding rows " + str(rowsProcessed) + ":" + str(next)
            dla.addMessage(msg)
            arcpy.SetProgressorLabel(msg)
            token = getSigninToken()
            params = {'rollbackonfailure': 'true', 'f': 'json', 'token': token, 'features': json.dumps(rows)}
            result = sendRequest(url, params)
            try:
                if result['error'] != None:
                    retval = False
                    dla.addMessage("Add rows to Service failed")
                    dla.addMessage(json.dumps(result))
                    error = True
            except:
                try:
                    lenAdded = len(result['addResults'])
                    total = rowsProcessed + chunk
                    if total > numFeat:
                        total = numFeat
                    msg = str(lenAdded) + " rows added, " + str(total) + "/" + str(numFeat)
                    print(msg)
                    dla.addMessage(msg)
                    retval = True
                except:
                    retval = False
                    dla.addMessage("Add rows to Service failed. Unfortunately you will need to re-run this tool.")
                    #dla.showTraceback()
                    #dla.addError(json.dumps(result))
                    error = True
            rowsProcessed += chunk
    except:
        retval = False
        dla.addMessage("Add rows to Service failed")
        dla.showTraceback()
        error = True
        pass
    return retval
def setFieldValues(table, fields, names, ftypes, lengths):
    # from source xml file match old values to new values to prepare for append to target geodatabase
    success = False
    row = None
    try:
        updateCursor = arcpy.da.UpdateCursor(table, names)
        result = arcpy.GetCount_management(table)
        numFeat = int(result.getOutput(0))
        dla.addMessage(table + ", " + str(numFeat) + " features")
        i = 0
        arcpy.SetProgressor("Step", "Calculating " + table + "...", 0, numFeat, getProgressUpdate(numFeat))
        for row in updateCursor:
            success = True
            if dla._errCount > dla.maxErrorCount:
                dla.addError("Exceeded max number of errors in dla.maxErrorCount: " + str(dla.maxErrorCount))
                return False
            if i > dla.maxrows:
                dla.addError("Exceeded max number of rows supported in dla.maxrows: " + str(dla.maxrows))
                return True
            i = i + 1
            setProgressor(i, numFeat)
            for field in fields:
                method = "None"
                sourceName = dla.getNodeValue(field, "SourceName").replace('.', '_')
                targetName = dla.getNodeValue(field, "TargetName").replace('.', '_')
                targetValue = getTargetValue(row, field, names, sourceName, targetName)
                sourceValue = getSourceValue(row, names, sourceName, targetName)
                method = dla.getNodeValue(field, "Method").replace(" ", "")
                try:
                    fnum = dla.getFieldIndexList(names, targetName)
                except:
                    fnum = None  # defensive check to skip fields that do not exist even though they are listed in Xml
                if fnum != None:
                    if method == "None" or (method == "Copy" and sourceName == '(None)'):
                        method = "None"
                        val = None
                    elif method == "Copy":
                        val = sourceValue
                    elif method == "DefaultValue":
                        val = dla.getNodeValue(field, "DefaultValue")
                    elif method == "SetValue":
                        val = dla.getNodeValue(field, "SetValue")
                    elif method == "ValueMap":
                        val = getValueMap(targetName, sourceValue, field)
                    elif method == "DomainMap":
                        val = getDomainMap(row, sourceValue, field)
                    elif method == "ChangeCase":
                        case = dla.getNodeValue(field, method)
                        expression = getChangeCase(sourceValue, case)
                        val = getExpression(row, names, expression)
                    elif method == "Concatenate":
                        val = getConcatenate(row, names, field)
                    elif method == "Left":
                        chars = dla.getNodeValue(field, "Left")
                        val = getSubstring(sourceValue, "0", chars)
                    elif method == "Right":
                        chars = dla.getNodeValue(field, "Right")
                        val = getSubstring(sourceValue, len(str(sourceValue)) - int(chars), len(str(sourceValue)))
                    elif method == "Substring":
                        start = dla.getNodeValue(field, "Start")
                        length = dla.getNodeValue(field, "Length")
                        val = getSubstring(sourceValue, start, length)
                    elif method == "Split":
                        splitter = dla.getNodeValue(field, "SplitAt")
                        splitter = splitter.replace("(space)", " ")
                        part = dla.getNodeValue(field, "Part")
                        val = getSplit(sourceValue, splitter, part)
                    elif method == "ConditionalValue":
                        sname = dla.getNodeValue(field, "SourceName")
                        oper = dla.getNodeValue(field, "Oper")
                        iif = dla.getNodeValue(field, "If")
                        if iif != " " and isinstance(iif, str):  # fixed: original compared type(iif) to the string 'str'
                            for name in names:
                                if name in iif:
                                    iif = iif.replace(name, "|" + name + "|")
                        tthen = dla.getNodeValue(field, "Then")
                        eelse = dla.getNodeValue(field, "Else")
                        for name in names:
                            if name in eelse:
                                eelse = eelse.replace(name, "|" + name + "|")
                        expression = "|" + tthen + "| " + " if |" + sname + "| " + oper + " |" + iif + "| else " + eelse
                        val = getExpression(row, names, expression)
                    elif method == "Expression":
                        expression = dla.getNodeValue(field, method)
                        for name in names:
                            expression = expression.replace(name, "|" + name + "|")
                        val = getExpression(row, names, expression)
                    # set field value
                    newVal = getValue(ftypes[fnum], lengths[fnum], targetName, targetValue, val)
                    row[fnum] = newVal
                    if dla.debug == True:
                        dla.addMessage(targetName + ':' + str(newVal) + ':' + str(targetValue))
            try:
                updateCursor.updateRow(row)
                #printRow(row,names)
            except:
                dla._errCount += 1
                success = False
                err = "Exception caught: unable to update row"
                if dla._errCount < 200:
                    printRow(row, names)
                    dla.showTraceback()
                else:
                    if dla._errCount < 2000:
                        dla.addMessage('More than 200 errors encountered... debug output suppressed')
                dla.addError(err)
    except:
        dla._errCount += 1
        success = False
        err = "Exception caught: unable to update dataset"
        if row != None:
            printRow(row, names)
        dla.showTraceback()
        dla.addError(err)
    finally:
        del updateCursor
        dla.cleanupGarbage()
        arcpy.ResetProgressor()
    return success
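# Hedged sketch (not part of the original module): an example of a single <Field> element
# of the kind setFieldValues() iterates over. The node names (SourceName, TargetName,
# Method, SplitAt, Part, ...) are taken from the branches above; the sample values are
# invented for illustration.
from xml.dom import minidom

_FIELD_SKETCH = minidom.parseString(
    "<Field>"
    "  <SourceName>OWNER_NAME</SourceName>"
    "  <TargetName>OWNER</TargetName>"
    "  <Method>Split</Method>"
    "  <SplitAt>(space)</SplitAt>"
    "  <Part>0</Part>"
    "</Field>"
)
# With Method = Split, the code above replaces "(space)" with " " and keeps part 0 of the
# split source value; other methods read their own child nodes (DefaultValue, SetValue,
# Start/Length, If/Oper/Then/Else, and so on).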
def setFieldValues(table, fields, names, ftypes, lengths):
    # from source xml file match old values to new values to prepare for append to target geodatabase
    success = False
    row = None
    try:
        updateCursor = arcpy.da.UpdateCursor(table, names)
        result = arcpy.GetCount_management(table)
        numFeat = int(result.getOutput(0))
        dla.addMessage(table + ", " + str(numFeat) + " features")
        i = 0
        arcpy.SetProgressor("Step", "Calculating " + table + "...", 0, numFeat, getProgressUpdate(numFeat))
        for row in updateCursor:
            success = True
            if dla._errCount > dla.maxErrorCount:
                #dla.addMessage("Exceeded max number of errors in dla.maxErrorCount: " + str(dla.maxErrorCount))
                dla.addError("Exceeded max number of errors in dla.maxErrorCount: " + str(dla.maxErrorCount))
                return False
            if i > dla.maxrows:
                #dla.addMessage("Exceeded max number of rows supported in dla.maxrows: " + str(dla.maxrows))
                dla.addError("Exceeded max number of rows supported in dla.maxrows: " + str(dla.maxrows))
                return True
            i = i + 1
            setProgressor(i, numFeat)
            for field in fields:
                method = "None"
                sourceName = dla.getNodeValue(field, "SourceName")
                targetName = dla.getNodeValue(field, "TargetName")
                targetValue = getTargetValue(row, field, names, sourceName, targetName)
                sourceValue = getSourceValue(row, names, sourceName, targetName)
                method = dla.getNodeValue(field, "Method").replace(" ", "")
                fnum = names.index(targetName)
                if method == "None" or (method == "Copy" and sourceName == dla._noneFieldName):
                    val = None
                    method = "None"
                elif method == "Copy":
                    val = sourceValue
                elif method == "DefaultValue":
                    val = dla.getNodeValue(field, "DefaultValue")
                elif method == "SetValue":
                    val = dla.getNodeValue(field, "SetValue")
                elif method == "ValueMap":
                    val = getValueMap(row, names, sourceValue, field)
                elif method == "ChangeCase":
                    case = dla.getNodeValue(field, method)
                    expression = getChangeCase(sourceValue, case)
                    val = getExpression(row, names, expression)
                elif method == "Concatenate":
                    val = getConcatenate(row, names, field)
                elif method == "Left":
                    chars = dla.getNodeValue(field, "Left")
                    val = getSubstring(sourceValue, "0", chars)
                elif method == "Right":
                    chars = dla.getNodeValue(field, "Right")
                    val = getSubstring(sourceValue, len(str(sourceValue)) - int(chars), len(str(sourceValue)))
                elif method == "Substring":
                    start = int(dla.getNodeValue(field, "Start"))
                    lngth = int(dla.getNodeValue(field, "Length"))
                    if sourceValue != None:
                        lngth = start + lngth
                    val = getSubstring(sourceValue, start, lngth)
                elif method == "Split":
                    splitter = dla.getNodeValue(field, "SplitAt")
                    splitter = splitter.replace("(space)", " ")
                    part = dla.getNodeValue(field, "Part")
                    val = getSplit(sourceValue, splitter, part)
                elif method == "ConditionalValue":
                    sname = dla.getNodeValue(field, "SourceName")
                    oper = dla.getNodeValue(field, "Oper")
                    iif = dla.getNodeValue(field, "If")
                    if iif != " " and isinstance(iif, str):  # fixed: original compared type(iif) to the string 'str'
                        for name in names:
                            if name in iif:
                                iif = iif.replace(name, "|" + name + "|")
                    tthen = dla.getNodeValue(field, "Then")
                    eelse = dla.getNodeValue(field, "Else")
                    for name in names:
                        if name in eelse:
                            eelse = eelse.replace(name, "|" + name + "|")
                    expression = "|" + tthen + "| " + " if |" + sname + "| " + oper + " |" + iif + "| else " + eelse
                    val = getExpression(row, names, expression)
                elif method == "Expression":
                    expression = dla.getNodeValue(field, method)
                    for name in names:
                        expression = expression.replace(name, "|" + name + "|")
                    val = getExpression(row, names, expression)
                # set field value
                if method != "None" and val != None:
                    newVal = getValue(names, ftypes, lengths, targetName, targetValue, val)
                    row[fnum] = newVal
                else:
                    row[fnum] = val
            try:
                updateCursor.updateRow(row)
            except:
                dla._errCount += 1
                success = False
                err = "Exception caught: unable to update row"
                printRow(row, names)
                dla.showTraceback()
                dla.addError(err)
    except:
        dla._errCount += 1
        success = False
        err = "Exception caught: unable to update dataset"
        if row != None:
            printRow(row, names)
        dla.showTraceback()
        dla.addError(err)
    finally:
        del updateCursor
        dla.cleanupGarbage()
        arcpy.ResetProgressor()
    return success
# Fragment: tail of an earlier setFieldValues() variant that delegates to setValue()
# instead of getValue(); the function header and the preceding method branches are not
# included here. Indentation reflects its position inside the per-row field loop.
                    expression = dla.getNodeValue(field, method)
                    for name in names:
                        expression = expression.replace(name, "|" + name + "|")
                    val = getExpression(row, names, expression)
                # set field value
                if method != "None":
                    setValue(row, names, types, lengths, targetName, targetValue, val)
            try:
                updateCursor.updateRow(row)
            except:
                dla._errCount += 1
                success = False
                err = "Exception caught: unable to update row"
                printRow(row, names)
                dla.showTraceback()
                dla.addError(err)
    except:
        dla._errCount += 1
        success = False
        err = "Exception caught: unable to update dataset"
        if row != None:
            printRow(row, names)
        dla.showTraceback()
        dla.addError(err)
    finally:
        del updateCursor
        dla.cleanupGarbage()
        arcpy.ResetProgressor()