def preview(xmlFileName):
    """Extract up to rowLimit rows from the configured source, run the field
    calculator on the intermediate feature class, and add the result to the
    map so the mapping can be inspected before a real load.

    Falls back to the Source/Target paths in the config file when the
    module-level sourceLayer/targetLayer globals are unset.
    """
    global sourceLayer, targetLayer, rowLimit
    dla.setWorkspace()
    dla._errCount = 0
    xmlDoc = dla.getXmlDoc(xmlFileName)
    if rowLimit in ("", None):
        rowLimit = 100  # sensible default sample size for a preview
    if sourceLayer in ("", None):
        sourceLayer = dla.getNodeValue(xmlDoc, "Source")
    if targetLayer in ("", None):
        targetLayer = dla.getNodeValue(xmlDoc, "Target")
    # time-stamped name keeps repeated previews from colliding
    stamp = datetime.datetime.now().strftime("%Y%m%d%H%M")
    previewName = dla.getTargetName(xmlDoc) + stamp
    previewFC = os.path.join(dla.workspace, previewName)
    extracted = dlaExtractLayerToGDB.extract(xmlFileName, rowLimit, dla.workspace, sourceLayer, previewFC)
    if extracted != True:
        dla.addError("Failed to Extract data")
        print("Failed to Extract data")
    elif dlaFieldCalculator.calculate(xmlFileName, dla.workspace, previewName, False) == True:
        arcpy.env.addOutputsToMap = True
        scratchLayer = previewName + "tmp"
        if arcpy.Exists(scratchLayer):
            arcpy.Delete_management(scratchLayer)
        arcpy.MakeFeatureLayer_management(previewFC, scratchLayer)
        visibility = dla.getLayerVisibility(scratchLayer, xmlFileName)
        # second layer exposes only the target fields to the user
        arcpy.MakeFeatureLayer_management(previewFC, previewName, None, dla.workspace, visibility)
        arcpy.SetParameter(_success, previewName)
    dla.writeFinalMessage("Data Assistant - Preview")
def main(argv=None):
    """Script entry point: resolve workspace and layer defaults from the Xml
    config, run the extract, and report success through the tool parameter.

    Reads the module-level xmlFileName/sourceLayer/targetLayer/rowLimit
    globals populated from the geoprocessing tool parameters.
    """
    global sourceLayer, targetLayer
    xmlDoc = dla.getXmlDoc(xmlFileName)
    if dla.workspace == "" or dla.workspace == "#" or dla.workspace == None:
        dla.workspace = arcpy.env.scratchGDB
    if sourceLayer == "" or sourceLayer == None:
        sourceLayer = dla.getNodeValue(xmlDoc, "Source")
    if targetLayer == "" or targetLayer == None:
        targetLayer = dla.getNodeValue(xmlDoc, "Target")
    success = extract(xmlFileName, rowLimit, dla.workspace, sourceLayer, targetLayer)
    # BUG FIX: this check previously ran BEFORE `success` was assigned, so it
    # referenced an undefined name and could never report the current run.
    if success == False:
        dla.addError("Errors occurred during process")
    arcpy.SetParameter(SUCCESS, success)
def preview(xmlFileName):
    """Stage a limited-row extract of the configured source dataset, calculate
    the mapped fields, and add the result to the map as a preview.

    Handles both tables and feature classes. Uses the module globals
    source/target/rowLimit when set, otherwise the paths from the config file.
    Returns False when the project cannot be opened.
    """
    global source, target, rowLimit
    dla.setWorkspace()
    dla._errCount = 0
    xmlFileName = dla.getXmlDocName(xmlFileName)
    xmlDoc = dla.getXmlDoc(xmlFileName)
    if rowLimit in ("", None):
        rowLimit = 100  # default preview sample size
    prj = dla.setProject(xmlFileName, dla.getNodeValue(xmlDoc, "Project"))
    if prj == None:
        dla.addError(
            "Unable to open your project, please ensure it is in the same folder as your current project or your Config file"
        )
        return False
    if source in ("", None):
        source = dla.getDatasetPath(xmlDoc, "Source")
    if target in ("", None):
        target = dla.getDatasetPath(xmlDoc, "Target")
    # tables and feature classes go through different extract/view paths
    datasetType = 'Table' if dla.isTable(source) or dla.isTable(target) else 'FeatureClass'
    stamp = datetime.datetime.now().strftime("%Y%m%d%H%M")
    previewName = dla.getDatasetName(target) + stamp
    previewDS = os.path.join(dla.workspace, previewName)

    def makeView(*args):
        # a table source needs a table view; anything else a feature layer
        if dla.isTable(previewDS):
            arcpy.MakeTableView_management(*args)
        else:
            arcpy.MakeFeatureLayer_management(*args)

    extracted = dlaExtractLayerToGDB.extract(xmlFileName, rowLimit, dla.workspace, source, previewDS, datasetType)
    if extracted != True:
        dla.addError("Failed to Extract data")
        print("Failed to Extract data")
    elif dlaFieldCalculator.calculate(xmlFileName, dla.workspace, previewName, False) == True:
        arcpy.env.addOutputsToMap = True
        scratchView = previewName + "tmp"
        if arcpy.Exists(scratchView):
            arcpy.Delete_management(scratchView)
        makeView(previewDS, scratchView)
        visibility = dla.getLayerVisibility(scratchView, xmlFileName)
        # the published view should make only the target fields visible
        makeView(previewDS, previewName, None, dla.workspace, visibility)
        arcpy.SetParameter(_success, previewName)
    dla.writeFinalMessage("Data Assistant - Preview")
def getSpatialReference(xmlDoc, lyrtype):
    """Return the arcpy.SpatialReference for the given layer type ('Source' or
    'Target') described in the Xml document.

    Tries the <lyrtype>FactoryCode node first, then falls back to the
    serialized <lyrtype>SpatialReference string. Returns '' when neither is
    present (an error is logged in that case).
    """
    spref = ''
    factoryCode = dla.getNodeValue(xmlDoc, lyrtype + "FactoryCode")
    if factoryCode != '':
        spref = arcpy.SpatialReference(factoryCode)
    else:
        srefString = dla.getNodeValue(xmlDoc, lyrtype + "SpatialReference")
        if srefString != '':
            spref = arcpy.SpatialReference()
            spref.loadFromString(srefString)
    # still '' means neither node yielded a value
    if spref == '':
        arcpy.AddError("Unable to retrieve Spatial Reference for " + lyrtype + " layer")
    return spref
def getWhereClause(xmlDoc):
    """Build the SQL where clause described by the ReplaceBy element of the
    config Xml; returns '' when nothing is configured.

    A "Where" operator means the Value node already holds the complete
    clause; otherwise the clause is assembled as "field operator value",
    quoting the value for String target fields.
    """
    replaceBy = xmlDoc.getElementsByTagName("ReplaceBy")[0]
    fieldName = dla.getNodeValue(replaceBy, "FieldName")
    operator = dla.getNodeValue(replaceBy, "Operator")
    value = dla.getNodeValue(replaceBy, "Value")
    fieldType = getTargetType(xmlDoc, fieldName)
    if fieldName not in ('', '(None)') and operator != "Where":
        # string values must be quoted in SQL
        quoted = "'" + value + "'" if fieldType == 'String' else value
        return fieldName + " " + operator + " " + quoted
    if operator == 'Where':
        return value
    return ''
def getWhereClause(xmlDoc):
    # get the where clause using the xml document or return ''
    """Build the SQL where clause from the ReplaceBy element of the config Xml.

    Returns 'field operator value' (value quoted for String fields), the raw
    Value when the operator is the literal "Where", or '' when no replace-by
    field is configured.
    """
    repl = xmlDoc.getElementsByTagName("ReplaceBy")[0]
    fieldName = dla.getNodeValue(repl, "FieldName")
    operator = dla.getNodeValue(repl, "Operator")
    value = dla.getNodeValue(repl, "Value")
    expr = ''
    # NOTE(review): `type` shadows the builtin here; harmless locally.
    type = getTargetType(xmlDoc, fieldName)
    if fieldName != '' and fieldName != '(None)' and operator != "Where":
        if type == 'String':
            # quote the literal for SQL string comparison
            value = "'" + value + "'"
        expr = fieldName + " " + operator + " " + value
    elif operator == 'Where':
        # a "Where" operator means Value already holds the complete clause
        expr = value
    else:
        expr = ''  # empty string by default
    return expr
def extract(xmlFileName, rowLimit, workspace, sourceLayer, targetFC):
    """Extract the source dataset into an intermediate feature class in the
    staging workspace.

    Args:
        xmlFileName: Data Assistant config file.
        rowLimit: max rows to copy (None/0 semantics handled downstream).
        workspace: staging geodatabase; ''/'#'/None means arcpy's scratchGDB.
        sourceLayer: source path; ''/'#' means read Datasets/Source from Xml.
        targetFC: output path; ''/'#' means use the config's target name.

    Returns:
        True on success, False when the export or a fatal error fails.
    """
    xmlDoc = dla.getXmlDoc(xmlFileName)
    if workspace == "" or workspace == "#" or workspace == None:
        dla.workspace = arcpy.env.scratchGDB
    else:
        dla.workspace = workspace
    fields = dla.getFields(xmlFileName)
    success = True
    try:
        if not arcpy.Exists(dla.workspace):
            dla.addMessage(dla.workspace + " does not exist, attempting to create")
            dla.createGeodatabase()
        if len(fields) > 0:
            arcpy.SetProgressor("step", "Importing Layer...", 0, 1, 1)
            if sourceLayer == '' or sourceLayer == '#':
                source = dla.getNodeValue(xmlDoc, "Datasets/Source")
            else:
                source = sourceLayer
            if targetFC == '' or targetFC == '#':
                targetName = dla.getTargetName(xmlDoc)
            else:
                targetName = targetFC[targetFC.rfind(os.sep) + 1:]
            sourceName = dla.getSourceName(xmlDoc)
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " + targetName + "...")
            # BUG FIX: previously passed sourceLayer (possibly ''/'#') instead
            # of the resolved `source`, which made the Xml fallback dead code.
            retVal = exportDataset(xmlDoc, source, dla.workspace, targetName, rowLimit)
            if retVal == False:
                success = False
            arcpy.SetProgressorPosition()
    except Exception:
        # boundary handler: log and fall through with failure status
        dla.addError("A Fatal Error occurred")
        dla.showTraceback()
        success = False
    finally:
        arcpy.ResetProgressor()
        arcpy.ClearWorkspaceCache_management(dla.workspace)
    return success
def preview(xmlFileName):
    """Extract up to rowLimit rows, run the field calculator, and add the
    intermediate feature class to the map as a preview layer.

    Falls back to the Source/Target paths from the config file when the
    module-level sourceLayer/targetLayer globals are unset.
    """
    global sourceLayer, targetLayer, rowLimit
    dla.setWorkspace()
    # BUG FIX: was `dla._errorCount`; the shared counter used by the other
    # modules (extract/calculate/publish) is `dla._errCount`, so the reset
    # here had no effect on the real counter.
    dla._errCount = 0
    xmlDoc = dla.getXmlDoc(xmlFileName)
    if rowLimit == "" or rowLimit == None:
        rowLimit = 100  # default preview sample size
    if sourceLayer == "" or sourceLayer == None:
        sourceLayer = dla.getNodeValue(xmlDoc, "Source")
    if targetLayer == "" or targetLayer == None:
        targetLayer = dla.getNodeValue(xmlDoc, "Target")
    # time-stamped name keeps repeated previews from colliding
    dte = datetime.datetime.now().strftime("%Y%m%d%H%M")
    targetName = dla.getTargetName(xmlDoc) + dte
    targetFC = os.path.join(dla.workspace, targetName)
    res = dlaExtractLayerToGDB.extract(xmlFileName, rowLimit, dla.workspace, sourceLayer, targetFC)
    if res == True:
        res = dlaFieldCalculator.calculate(xmlFileName, dla.workspace, targetName, False)
        if res == True:
            arcpy.env.addOutputsToMap = True
            layer = targetName
            layertmp = targetName + "tmp"
            if arcpy.Exists(layertmp):
                arcpy.Delete_management(layertmp)
            arcpy.MakeFeatureLayer_management(targetFC, layertmp)
            fieldInfo = dla.getLayerVisibility(layertmp, xmlFileName)
            # should make only the target fields visible
            arcpy.MakeFeatureLayer_management(targetFC, layer, None, dla.workspace, fieldInfo)
            arcpy.SetParameter(_success, layer)
    else:
        dla.addError("Failed to Extract data")
        print("Failed to Extract data")
    dla.writeFinalMessage("Data Assistant - Preview")
def extract(xmlFileName, rowLimit, workspace, sourceLayer, targetFC):
    """Extract the source dataset into an intermediate feature class in the
    staging workspace.

    Args:
        xmlFileName: Data Assistant config file.
        rowLimit: max rows to copy.
        workspace: staging geodatabase; ''/'#'/None means arcpy's scratchGDB.
        sourceLayer: source path; ''/'#' means read Datasets/Source from Xml.
        targetFC: output path; ''/'#' means use the config's target name.

    Returns:
        True on success, False when the export or a fatal error fails.
    """
    xmlDoc = dla.getXmlDoc(xmlFileName)
    if workspace == "" or workspace == "#" or workspace == None:
        dla.workspace = arcpy.env.scratchGDB
    else:
        dla.workspace = workspace
    fields = dla.getFields(xmlFileName)
    success = True
    try:
        if not arcpy.Exists(dla.workspace):
            dla.addMessage(dla.workspace + " does not exist, attempting to create")
            dla.createGeodatabase()
        if len(fields) > 0:
            arcpy.SetProgressor("step", "Importing Layer...", 0, 1, 1)
            if sourceLayer == '' or sourceLayer == '#':
                source = dla.getNodeValue(xmlDoc, "Datasets/Source")
            else:
                source = sourceLayer
            if targetFC == '' or targetFC == '#':
                targetName = dla.getTargetName(xmlDoc)
            else:
                targetName = targetFC[targetFC.rfind(os.sep) + 1:]
            sourceName = dla.getSourceName(xmlDoc)
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " + targetName + "...")
            # BUG FIX: previously passed sourceLayer (possibly ''/'#') instead
            # of the resolved `source`, which made the Xml fallback dead code.
            retVal = exportDataset(xmlDoc, source, dla.workspace, targetName, rowLimit)
            if retVal == False:
                success = False
            arcpy.SetProgressorPosition()
    except Exception:
        # boundary handler: log and fall through with failure status
        dla.addError("A Fatal Error occurred")
        dla.showTraceback()
        success = False
    finally:
        arcpy.ResetProgressor()
        arcpy.ClearWorkspaceCache_management(dla.workspace)
    return success
def calculate(xmlFileName, workspace, name, ignore):
    """Add the mapped fields to the intermediate table and calculate their
    values from the source rows.

    Args:
        xmlFileName: Data Assistant config file.
        workspace: staging geodatabase (becomes dla.workspace).
        name: name of the intermediate table in the staging workspace.
        ignore: when True, force a True return regardless of row errors.

    Returns:
        True on success (or when ignore is True), False on failure; None when
        the table is missing, -1 when a schema lock cannot be obtained.
    """
    dla.workspace = workspace
    success = True
    arcpy.ClearWorkspaceCache_management(dla.workspace)
    xmlDoc = dla.getXmlDoc(xmlFileName)
    dla.addMessage("Field Calculator: " + xmlFileName)
    arcpy.env.Workspace = dla.workspace
    table = dla.getTempTable(name)
    if not arcpy.Exists(table):
        dla.addError("Feature Class " + table + " does not exist, exiting")
        arcpy.SetParameter(SUCCESS, False)
        return
    if not arcpy.TestSchemaLock(table):
        dla.addError("Unable to obtain a schema lock for " + table + ", exiting")
        arcpy.SetParameter(SUCCESS, False)
        return -1
    fields = dla.getXmlElements(xmlFileName, "Field")
    sourceFields = dla.getXmlElements(xmlFileName, "SourceField")
    targetFields = dla.getXmlElements(xmlFileName, "TargetField")
    attrs = [f.name for f in arcpy.ListFields(table)]
    for field in fields:
        arcpy.env.Workspace = dla.workspace
        targetName = dla.getNodeValue(field, "TargetName")
        # defaults when the target field is not described in the Xml
        ftype = "String"
        length = "50"
        for target in targetFields:
            if target.getAttributeNode("Name").nodeValue == targetName:
                ftype = target.getAttributeNode("Type").nodeValue
                length = target.getAttributeNode("Length").nodeValue
        dla.addDlaField(table, targetName, field, attrs, ftype, length)
    allFields = sourceFields + targetFields
    desc = arcpy.Describe(table)  # re-describe after fields were added
    layerNames = []
    names = []
    ftypes = []
    lengths = []
    # gdb system fields are handled automatically and cannot be calculated.
    # BUG FIX: renamed from `ignore` - the old code clobbered the boolean
    # parameter with this list, so `ignore == True` at the end never fired.
    ignoreNames = ['FID', 'OBJECTID', 'GLOBALID', 'SHAPE', 'SHAPE_AREA', 'SHAPE_LENGTH',
                   'SHAPE_LEN', 'STLENGTH()', 'STAREA()', 'raster']
    for prop in ['OIDFieldName', 'ShapeFieldName', 'LengthFieldName', 'AreaFieldName',
                 'GlobalIDFieldName', 'RasterFieldName']:
        try:
            # BUG FIX: was eval("desc." + name) followed by
            # ignore.append(val).upper(), which raised AttributeError
            # (list.append returns None) so the names were never added.
            val = getattr(desc, prop)
            val = val[val.rfind('.') + 1:]
            ignoreNames.append(val.upper())
        except AttributeError:
            pass  # this dataset type does not expose the property
    for field in desc.fields:
        if field.name.upper() not in ignoreNames:
            layerNames.append(field.name.upper())
    for field in allFields:
        nm = field.getAttributeNode("Name").nodeValue
        # skip the None placeholder, system fields, and names not in the dataset
        if nm != dla.noneName and nm.upper() not in ignoreNames and nm.upper() in layerNames:
            if nm not in names:  # keep names/ftypes/lengths parallel, no duplicates
                names.append(nm)
                ftypes.append(field.getAttributeNode("Type").nodeValue)
                lengths.append(field.getAttributeNode("Length").nodeValue)
    retVal = setFieldValues(table, fields, names, ftypes, lengths)
    if retVal == False:
        success = False
    arcpy.ClearWorkspaceCache_management(dla.workspace)
    dla.cleanupGarbage()
    arcpy.ResetProgressor()
    if ignore == True:
        success = True
    return success
def setFieldValues(table,fields,names,ftypes,lengths):
    # from source xml file match old values to new values to prepare for append to target geodatabase
    """Walk every row of the intermediate table and calculate each mapped
    field using the Method configured in the Xml (Copy, SetValue, ValueMap,
    expressions, substrings, etc.).

    names/ftypes/lengths are parallel lists describing the cursor fields.
    Returns True when at least one row was processed without a dataset-level
    error, False otherwise; may return early (False on too many row errors,
    True on hitting the dla.maxrows cap).
    """
    success = False
    row = None
    try:
        # NOTE(review): if UpdateCursor creation raises, the `del updateCursor`
        # in finally will itself raise NameError - confirm before relying on it.
        updateCursor = arcpy.da.UpdateCursor(table,names)
        result = arcpy.GetCount_management(table)
        numFeat = int(result.getOutput(0))
        dla.addMessage(table + ", " + str(numFeat) + " features")
        i = 0
        arcpy.SetProgressor("Step","Calculating " + table + "...",0,numFeat,getProgressUpdate(numFeat))
        for row in updateCursor:
            success = True
            if dla._errCount > dla.maxErrorCount:
                #dla.addMessage("Exceeded max number of errors in dla.maxErrorCount: " + str(dla.maxErrorCount))
                dla.addError("Exceeded max number of errors in dla.maxErrorCount: " + str(dla.maxErrorCount))
                return False
            if i > dla.maxrows:
                #dla.addMessage("Exceeded max number of rows supported in dla.maxrows: " + str(dla.maxrows))
                dla.addError("Exceeded max number of rows supported in dla.maxrows: " + str(dla.maxrows))
                return True
            i = i + 1
            setProgressor(i,numFeat)
            for field in fields:
                method = "None"
                sourceName = dla.getNodeValue(field,"SourceName")
                targetName = dla.getNodeValue(field,"TargetName")
                targetValue = getTargetValue(row,field,names,sourceName,targetName)
                sourceValue = getSourceValue(row,names,sourceName,targetName)
                method = dla.getNodeValue(field,"Method").replace(" ","")
                # NOTE(review): raises ValueError if targetName is not in the
                # cursor field list; caught by the outer except as a dataset error.
                fnum = names.index(targetName)
                if method == "None" or (method == "Copy" and sourceName == dla._noneFieldName):
                    val = None
                    method = "None"
                elif method == "Copy":
                    val = sourceValue
                elif method == "DefaultValue":
                    val = dla.getNodeValue(field,"DefaultValue")
                elif method == "SetValue":
                    val = dla.getNodeValue(field,"SetValue")
                elif method == "ValueMap":
                    val = getValueMap(row,names,sourceValue,field)
                elif method == "ChangeCase":
                    case = dla.getNodeValue(field,method)
                    expression = getChangeCase(sourceValue,case)
                    val = getExpression(row,names,expression)
                elif method == "Concatenate":
                    val = getConcatenate(row,names,field)
                elif method == "Left":
                    chars = dla.getNodeValue(field,"Left")
                    val = getSubstring(sourceValue,"0",chars)
                elif method == "Right":
                    chars = dla.getNodeValue(field,"Right")
                    val = getSubstring(sourceValue,len(str(sourceValue))-int(chars),len(str(sourceValue)))
                elif method == "Substring":
                    start = int(dla.getNodeValue(field,"Start"))
                    lngth = int(dla.getNodeValue(field,"Length"))
                    if sourceValue != None:
                        # convert length to an absolute end index for the slice
                        lngth = start + lngth
                    val = getSubstring(sourceValue,start,lngth)
                elif method == "Split":
                    splitter = dla.getNodeValue(field,"SplitAt")
                    # "(space)" is the UI placeholder for a literal space
                    splitter = splitter.replace("(space)"," ")
                    part = dla.getNodeValue(field,"Part")
                    val = getSplit(sourceValue,splitter,part)
                elif method == "ConditionalValue":
                    sname = dla.getNodeValue(field,"SourceName")
                    oper = dla.getNodeValue(field,"Oper")
                    iif = dla.getNodeValue(field,"If")
                    # NOTE(review): type(iif) == 'str' compares a class object
                    # to a string and is therefore always False - this
                    # substitution loop never runs. Likely intended
                    # isinstance(iif, str); confirm before changing behavior.
                    if iif != " " and type(iif) == 'str':
                        for name in names:
                            if name in iif:
                                iif = iif.replace(name,"|"+name+"|")
                    tthen = dla.getNodeValue(field,"Then")
                    eelse = dla.getNodeValue(field,"Else")
                    for name in names:
                        if name in eelse:
                            eelse = eelse.replace(name,"|"+name+"|")
                    # |name| markers are substituted with row values by getExpression
                    expression = "|" + tthen + "| " + " if |" + sname + "| " + oper + " |" + iif + "| else " + eelse
                    val = getExpression(row,names,expression)
                elif method == "Expression":
                    expression = dla.getNodeValue(field,method)
                    for name in names:
                        expression = expression.replace(name,"|" + name + "|")
                    val = getExpression(row,names,expression)
                # set field value
                if method != "None" and val != None:
                    # coerce to the target type/length before assignment
                    newVal = getValue(names,ftypes,lengths,targetName,targetValue,val)
                    row[fnum] = newVal
                else:
                    row[fnum] = val
            try:
                updateCursor.updateRow(row)
            except:
                # per-row failure: count it and keep processing remaining rows
                dla._errCount += 1
                success = False
                err = "Exception caught: unable to update row"
                printRow(row,names)
                dla.showTraceback()
                dla.addError(err)
    except:
        # dataset-level failure (cursor creation, bad field list, ...)
        dla._errCount += 1
        success = False
        err = "Exception caught: unable to update dataset"
        if row != None:
            printRow(row,names)
        dla.showTraceback()
        dla.addError(err)
    finally:
        del updateCursor
        dla.cleanupGarbage()
        arcpy.ResetProgressor()
    return success
def calculate(xmlFileName, workspace, name, ignore):
    """Add the mapped fields to the intermediate table and calculate their
    values from the source rows.

    Args:
        xmlFileName: Data Assistant config file.
        workspace: staging geodatabase (becomes dla.workspace).
        name: name of the intermediate table in the staging workspace.
        ignore: when True, force a True return regardless of row errors.

    Returns:
        True on success (or when ignore is True), False on failure; None when
        the table is missing, -1 when a schema lock cannot be obtained.
    """
    dla.workspace = workspace
    success = True
    arcpy.ClearWorkspaceCache_management(dla.workspace)
    xmlDoc = dla.getXmlDoc(xmlFileName)
    dla.addMessage("Field Calculator: " + xmlFileName)
    arcpy.env.Workspace = dla.workspace
    table = dla.getTempTable(name)
    if not arcpy.Exists(table):
        dla.addError("Feature Class " + table + " does not exist, exiting")
        arcpy.SetParameter(SUCCESS, False)
        return
    if not arcpy.TestSchemaLock(table):
        dla.addError("Unable to obtain a schema lock for " + table + ", exiting")
        arcpy.SetParameter(SUCCESS, False)
        return -1
    fields = dla.getXmlElements(xmlFileName, "Field")
    sourceFields = dla.getXmlElements(xmlFileName, "SourceField")
    targetFields = dla.getXmlElements(xmlFileName, "TargetField")
    attrs = [f.name for f in arcpy.ListFields(table)]
    # index the target field types/lengths by name for constant-time lookup
    target_values = CaseInsensitiveDict()
    for target in targetFields:
        nm = target.getAttributeNode("Name").nodeValue
        target_values[nm] = dict(
            ftype=target.getAttributeNode("Type").nodeValue,
            flength=target.getAttributeNode("Length").nodeValue)
    for field in fields:
        arcpy.env.Workspace = dla.workspace
        targetName = dla.getNodeValue(field, "TargetName")
        # defaults when the target field is not described in the Xml
        ftype = "String"
        flength = "50"
        if targetName in target_values:
            ftype = target_values[targetName]['ftype']
            flength = target_values[targetName]['flength']
        # make sure the field exists in the field calculator dataset
        retcode = dla.addDlaField(table, targetName, field, attrs, ftype, flength)
        if retcode == False:
            # BUG FIX: was a bare addError(...) which raised NameError when
            # this branch was hit; the helper lives in the dla module.
            dla.addError("Unable to add field " + targetName + " to database to calculate values, exiting")
    allFields = sourceFields + targetFields
    desc = arcpy.Describe(table)  # re-describe after fields were added
    layerNames = []
    names = []
    ftypes = []
    lengths = []
    # gdb system fields are handled automatically and cannot be calculated.
    # BUG FIX: stored in ignoreNames instead of clobbering the `ignore` bool
    # parameter, so the `ignore == True` override at the end can still fire.
    ignoreNames = [nm.upper() for nm in dla.getIgnoreFieldNames(desc)]
    for field in desc.fields:
        # uppercase names of everything that actually exists in the dataset
        if field.name.upper() not in ignoreNames:
            layerNames.append(field.name.upper())
    for field in allFields:
        # handle joins and remaining '.' in field names
        nm = field.getAttributeNode("Name").nodeValue.replace('.', '_')
        # skip the None placeholder, system fields, and names not in the dataset
        if nm != dla._noneFieldName and nm.upper() not in ignoreNames and nm.upper() in layerNames:
            if dla.getFieldIndexList(names, nm) is None:
                # keep names/ftypes/lengths parallel, no duplicates
                names.append(nm)
                ftypes.append(field.getAttributeNode("Type").nodeValue)
                lengths.append(field.getAttributeNode("Length").nodeValue)
    retVal = setFieldValues(table, fields, names, ftypes, lengths)
    if retVal == False:
        success = False
    arcpy.ClearWorkspaceCache_management(dla.workspace)
    dla.cleanupGarbage()
    arcpy.ResetProgressor()
    if ignore == True:
        success = True
    return success
def setFieldValues(table, fields, names, ftypes, lengths):
    # from source xml file match old values to new values to prepare for append to target geodatabase
    """Walk every row of the intermediate table and calculate each mapped
    field using the Method configured in the Xml (Copy, SetValue, ValueMap,
    DomainMap, expressions, substrings, etc.).

    names/ftypes/lengths are parallel lists describing the cursor fields.
    Returns True when at least one row was processed without a dataset-level
    error, False otherwise; may return early (False on too many row errors,
    True on hitting the dla.maxrows cap).
    """
    success = False
    row = None
    try:
        # NOTE(review): if UpdateCursor creation raises, `del updateCursor`
        # in finally will itself raise NameError - confirm before relying on it.
        updateCursor = arcpy.da.UpdateCursor(table, names)
        result = arcpy.GetCount_management(table)
        numFeat = int(result.getOutput(0))
        dla.addMessage(table + ", " + str(numFeat) + " features")
        i = 0
        arcpy.SetProgressor("Step", "Calculating " + table + "...", 0, numFeat, getProgressUpdate(numFeat))
        for row in updateCursor:
            success = True
            if dla._errCount > dla.maxErrorCount:
                dla.addError(
                    "Exceeded max number of errors in dla.maxErrorCount: " +
                    str(dla.maxErrorCount))
                return False
            if i > dla.maxrows:
                dla.addError(
                    "Exceeded max number of rows supported in dla.maxrows: " +
                    str(dla.maxrows))
                return True
            i = i + 1
            setProgressor(i, numFeat)
            for field in fields:
                method = "None"
                # '.' in joined field names was flattened to '_' upstream
                sourceName = dla.getNodeValue(field, "SourceName").replace('.', '_')
                targetName = dla.getNodeValue(field, "TargetName").replace('.', '_')
                targetValue = getTargetValue(row, field, names, sourceName, targetName)
                sourceValue = getSourceValue(row, names, sourceName, targetName)
                method = dla.getNodeValue(field, "Method").replace(" ", "")
                try:
                    fnum = dla.getFieldIndexList(names, targetName)
                except:
                    fnum = None
                # defensive check to skip fields that do not exist even though they are listed in Xml
                if fnum != None:
                    if method == "None" or (method == "Copy" and sourceName == '(None)'):
                        method = "None"
                        val = None
                    elif method == "Copy":
                        val = sourceValue
                    elif method == "DefaultValue":
                        val = dla.getNodeValue(field, "DefaultValue")
                    elif method == "SetValue":
                        val = dla.getNodeValue(field, "SetValue")
                    elif method == "ValueMap":
                        val = getValueMap(targetName, sourceValue, field)
                    elif method == "DomainMap":
                        val = getDomainMap(row, sourceValue, field)
                    elif method == "ChangeCase":
                        case = dla.getNodeValue(field, method)
                        expression = getChangeCase(sourceValue, case)
                        val = getExpression(row, names, expression)
                    elif method == "Concatenate":
                        val = getConcatenate(row, names, field)
                    elif method == "Left":
                        chars = dla.getNodeValue(field, "Left")
                        val = getSubstring(sourceValue, "0", chars)
                    elif method == "Right":
                        chars = dla.getNodeValue(field, "Right")
                        val = getSubstring(sourceValue, len(str(sourceValue)) - int(chars), len(str(sourceValue)))
                    elif method == "Substring":
                        start = dla.getNodeValue(field, "Start")
                        length = dla.getNodeValue(field, "Length")
                        val = getSubstring(sourceValue, start, length)
                    elif method == "Split":
                        splitter = dla.getNodeValue(field, "SplitAt")
                        # "(space)" is the UI placeholder for a literal space
                        splitter = splitter.replace("(space)", " ")
                        part = dla.getNodeValue(field, "Part")
                        val = getSplit(sourceValue, splitter, part)
                    elif method == "ConditionalValue":
                        sname = dla.getNodeValue(field, "SourceName")
                        oper = dla.getNodeValue(field, "Oper")
                        iif = dla.getNodeValue(field, "If")
                        # NOTE(review): type(iif) == 'str' compares a class
                        # object to a string and is always False, so this
                        # substitution loop never runs. Likely intended
                        # isinstance(iif, str); confirm before changing.
                        if iif != " " and type(iif) == 'str':
                            for name in names:
                                if name in iif:
                                    iif = iif.replace(name, "|" + name + "|")
                        tthen = dla.getNodeValue(field, "Then")
                        eelse = dla.getNodeValue(field, "Else")
                        for name in names:
                            if name in eelse:
                                eelse = eelse.replace(name, "|" + name + "|")
                        # |name| markers are substituted with row values by getExpression
                        expression = "|" + tthen + "| " + " if |" + sname + "| " + oper + " |" + iif + "| else " + eelse
                        val = getExpression(row, names, expression)
                    elif method == "Expression":
                        expression = dla.getNodeValue(field, method)
                        for name in names:
                            expression = expression.replace(
                                name, "|" + name + "|")
                        val = getExpression(row, names, expression)
                    # set field value
                    newVal = getValue(ftypes[fnum], lengths[fnum], targetName, targetValue, val)
                    row[fnum] = newVal
                    if dla.debug == True:
                        dla.addMessage(targetName + ':' + str(newVal) + ':' + str(targetValue))
            try:
                updateCursor.updateRow(row)
                #printRow(row,names)
            except:
                # per-row failure: count it and keep processing remaining rows
                dla._errCount += 1
                success = False
                err = "Exception caught: unable to update row"
                if dla._errCount < 200:
                    printRow(row, names)
                    dla.showTraceback()
                else:
                    # NOTE(review): this message is emitted for EVERY failing
                    # row between counts 200 and 2000, not just once.
                    if dla._errCount < 2000:
                        dla.addMessage(
                            'More than 200 errors encountered... debug output suppressed'
                        )
                dla.addError(err)
    except:
        # dataset-level failure (cursor creation, bad field list, ...)
        dla._errCount += 1
        success = False
        err = "Exception caught: unable to update dataset"
        if row != None:
            printRow(row, names)
        dla.showTraceback()
        dla.addError(err)
    finally:
        del updateCursor
        dla.cleanupGarbage()
        arcpy.ResetProgressor()
    return success
def publish(xmlFileNames):
    # function called from main or from another script, performs the data update processing
    """Run the full extract/calculate/publish cycle for each config file.

    Args:
        xmlFileNames: ';'-separated list of Data Assistant config files.

    Behavior per file: validate project/services/privileges, stage the source
    into the intermediate dataset (unless already staged), calculate fields,
    then publish to the target with either replace-by or append semantics
    depending on the module-level _useReplaceSettings flag. Successfully
    published targets are collected into the _outParam tool output.
    Returns False on validation failures; None on some export failures.
    """
    global _useReplaceSettings
    dla._errCount = 0
    arcpy.SetProgressor("default", "Data Assistant")
    arcpy.SetProgressorLabel("Data Assistant")
    xmlFiles = xmlFileNames.split(";")
    layers = []
    for xmlFile in xmlFiles:  # multi value parameter, loop for each file
        xmlFile = dla.getXmlDocName(xmlFile)
        dla.addMessage("Configuration file: " + xmlFile)
        xmlDoc = dla.getXmlDoc(xmlFile)  # parse the xml document
        if xmlDoc == None:
            return
        prj = dla.setProject(xmlFile, dla.getNodeValue(xmlDoc, "Project"))
        if prj == None:
            dla.addError("Unable to open your project, please ensure it is in the same folder as your current project or your Config file")
            return False
        source = dla.getDatasetPath(xmlDoc, "Source")
        target = dla.getDatasetPath(xmlDoc, "Target")
        targetName = dla.getDatasetName(target)
        dla.addMessage(source)
        dla.addMessage(target)
        if dlaService.checkLayerIsService(source) or dlaService.checkLayerIsService(target):
            # when signed in get the token and use this. Will be requested many times during the publish
            token = dlaService.getSigninToken()
            # exit here before doing other things if not signed in
            if token == None:
                dla.addError("User must be signed in for this tool to work with services")
                return False
        expr = getWhereClause(xmlDoc)
        if _useReplaceSettings == True and (expr == '' or expr == None):
            dla.addError("There must be an expression for replacing by field value, current value = " + str(expr))
            return False
        # validate source and target before doing any work
        errs = False
        if dlaService.validateSourceUrl(source) == False:
            dla.addError("Source path does not appear to be a valid feature layer")
            errs = True
        if _useReplaceSettings == True:
            if dlaService.validateTargetReplace(target) == False:
                dla.addError("Target path does not have correct privileges")
                errs = True
        elif _useReplaceSettings == False:
            if dlaService.validateTargetAppend(target) == False:
                dla.addError("Target path does not have correct privileges")
                errs = True
        if errs:
            return False
        dla.setWorkspace()
        if dla.isTable(source) or dla.isTable(target):
            datasetType = 'Table'
        else:
            datasetType = 'FeatureClass'
        if not dla.isStaged(xmlDoc):
            res = dlaExtractLayerToGDB.extract(xmlFile, None, dla.workspace, source, target, datasetType)
            if res != True:
                table = dla.getTempTable(targetName)
                msg = "Unable to export data, there is a lock on existing datasets or another unknown error"
                if arcpy.TestSchemaLock(table) != True and arcpy.Exists(table) == True:
                    msg = "Unable to export data, there is a lock on the intermediate feature class: " + table
                dla.addError(msg)
                print(msg)
                return
            else:
                res = dlaFieldCalculator.calculate(xmlFile, dla.workspace, targetName, False)
                if res == True:
                    dlaTable = dla.getTempTable(targetName)
                    res = doPublish(xmlDoc, dlaTable, target, _useReplaceSettings)
        else:
            dla.addMessage('Data previously staged, will proceed using intermediate dataset')
            dlaTable = dla.workspace + os.sep + dla.getStagingName(source, target)
            res = doPublish(xmlDoc, dlaTable, target, _useReplaceSettings)
            if res == True:
                dla.removeStagingElement(xmlDoc)
                # BUG FIX: the file handle from open(...) was never closed, so
                # the rewritten config could stay buffered/locked; use a
                # context manager to guarantee flush and close.
                with open(xmlFile, 'wt', encoding='utf-8') as xmlOut:
                    xmlDoc.writexml(xmlOut)
                dla.addMessage('Staging element removed from config file')
        arcpy.ResetProgressor()
        if res == False:
            err = "Data Assistant Update Failed, see messages for details"
            dla.addError(err)
            print(err)
        else:
            layers.append(target)
    arcpy.SetParameter(_outParam, ';'.join(layers))
def calculate(xmlFileName, workspace, name, ignore):
    """Add the configured fields to the intermediate table and calculate
    their values from the source rows.

    Args:
        xmlFileName: Data Assistant config file.
        workspace: staging geodatabase (becomes dla.workspace).
        name: name of the intermediate table in the staging workspace.
        ignore: when True, force a True return regardless of row errors.

    Returns:
        True on success (or when ignore is True), False on failure; None when
        the table is missing, -1 when a schema lock cannot be obtained.
    """
    dla.workspace = workspace
    success = True
    arcpy.ClearWorkspaceCache_management(dla.workspace)
    xmlDoc = dla.getXmlDoc(xmlFileName)
    arcpy.env.Workspace = dla.workspace
    table = dla.getTempTable(name)
    if not arcpy.Exists(table):
        dla.addError("Feature Class " + table + " does not exist, exiting")
        arcpy.SetParameter(SUCCESS, False)
        return
    if not arcpy.TestSchemaLock(table):
        dla.addError("Unable to obtain a schema lock for " + table + ", exiting")
        arcpy.SetParameter(SUCCESS, False)
        return -1
    desc = arcpy.Describe(table)
    fields = dla.getXmlElements(xmlFileName, "Field")
    sourceFields = dla.getXmlElements(xmlFileName, "SourceField")
    targetFields = dla.getXmlElements(xmlFileName, "TargetField")
    attrs = [f.name for f in arcpy.ListFields(table)]
    for field in fields:
        arcpy.env.Workspace = dla.workspace
        targetName = dla.getNodeValue(field, "TargetName")
        sourceName = dla.getNodeValue(field, "SourceName")
        # defaults when the target field is not described in the Xml
        fldType = "String"
        fldLength = "50"
        for tgt in targetFields:
            if tgt.getAttributeNode("Name").nodeValue == targetName:
                fldType = tgt.getAttributeNode("Type").nodeValue
                fldLength = tgt.getAttributeNode("Length").nodeValue
        dla.addDlaField(table, targetName, field, attrs, fldType, fldLength)
    # build parallel name/type/length lists for every mapped field
    names = []
    fldTypes = []
    fldLengths = []
    for field in sourceFields + targetFields:
        nm = field.getAttributeNode("Name").nodeValue
        if nm != dla.noneName:  # skip the "(None)" placeholder entries
            names.append(nm)
            fldTypes.append(field.getAttributeNode("Type").nodeValue)
            fldLengths.append(field.getAttributeNode("Length").nodeValue)
    if setFieldValues(table, fields, names, fldTypes, fldLengths) == False:
        success = False
    arcpy.ClearWorkspaceCache_management(dla.workspace)
    dla.cleanupGarbage()
    arcpy.ResetProgressor()
    if ignore == True:
        success = True
    return success
# NOTE(review): fragment of the setFieldValues method-dispatch chain; the
# enclosing def / row loop sit outside this chunk, so indentation is relative.
elif method == "ChangeCase":
    # case transform is evaluated through the shared expression engine
    case = dla.getNodeValue(field, method)
    expression = getChangeCase(sourceValue, case)
    val = getExpression(row, names, expression)
elif method == "Concatenate":
    val = getConcatenate(row, names, field)
elif method == "Left":
    chars = dla.getNodeValue(field, "Left")
    val = getSubstring(sourceValue, "0", chars)
elif method == "Right":
    # take the trailing `chars` characters of the stringified value
    chars = dla.getNodeValue(field, "Right")
    val = getSubstring(sourceValue, len(str(sourceValue)) - int(chars), len(str(sourceValue)))
elif method == "Substring":
    start = dla.getNodeValue(field, "Start")
    length = dla.getNodeValue(field, "Length")
    val = getSubstring(sourceValue, start, length)
elif method == "Split":
    splitter = dla.getNodeValue(field, "SplitAt")
    # "(space)" is the UI placeholder for a literal space
    splitter = splitter.replace("(space)", " ")
    part = dla.getNodeValue(field, "Part")
    val = getSplit(sourceValue, splitter, part)
elif method == "ConditionalValue":
    sname = dla.getNodeValue(field, "SourceName")
    oper = dla.getNodeValue(field, "Oper")
    iif = dla.getNodeValue(field, "If")
    # NOTE(review): type(iif) == 'str' compares a class object to a string
    # and is always False, so this substitution loop never runs - likely
    # intended isinstance(iif, str); confirm before changing behavior.
    if iif != " " and type(iif) == 'str':
        for name in names:
            if name in iif:
                iif = iif.replace(name, "|" + name + "|")
def setFieldValues(table, fields, names, types, lengths): # from source xml file match old values to new values to prepare for append to target geodatabase success = False row = None try: updateCursor = arcpy.da.UpdateCursor(table, names) result = arcpy.GetCount_management(table) numFeat = int(result.getOutput(0)) dla.addMessage(table + ", " + str(numFeat) + " features") i = 0 arcpy.SetProgressor("Step", "Calculating " + table + "...", 0, numFeat, getProgressUpdate(numFeat)) for row in updateCursor: success = True if dla._errCount > dla.maxErrorCount: #dla.addMessage("Exceeded max number of errors in dla.maxErrorCount: " + str(dla.maxErrorCount)) dla.addError( "Exceeded max number of errors in dla.maxErrorCount: " + str(dla.maxErrorCount)) return False if i > dla.maxrows: #dla.addMessage("Exceeded max number of rows supported in dla.maxrows: " + str(dla.maxrows)) dla.addError( "Exceeded max number of rows supported in dla.maxrows: " + str(dla.maxrows)) return True i = i + 1 setProgressor(i, numFeat) for field in fields: method = "None" sourceName = dla.getNodeValue(field, "SourceName") targetName = dla.getNodeValue(field, "TargetName") targetValue = getTargetValue(row, field, names, sourceName, targetName) sourceValue = getSourceValue(row, names, sourceName, targetName) method = dla.getNodeValue(field, "Method").replace(" ", "") if method == "None" or (method == "Copy" and sourceName == '(None)'): method = "None" val = None elif method == "Copy": val = sourceValue elif method == "DefaultValue": val = dla.getNodeValue(field, "DefaultValue") elif method == "SetValue": val = dla.getNodeValue(field, "SetValue") elif method == "ValueMap": val = getValueMap(row, names, sourceValue, field) elif method == "ChangeCase": case = dla.getNodeValue(field, method) expression = getChangeCase(sourceValue, case) val = getExpression(row, names, expression) elif method == "Concatenate": val = getConcatenate(row, names, field) elif method == "Left": chars = dla.getNodeValue(field, 
"Left") val = getSubstring(sourceValue, "0", chars) elif method == "Right": chars = dla.getNodeValue(field, "Right") val = getSubstring(sourceValue, len(str(sourceValue)) - int(chars), len(str(sourceValue)))
def publish(xmlFileNames):
    """Perform the data update processing for one or more configuration files.

    Called from main or from another script.  xmlFileNames is a semicolon
    delimited multi-value parameter; each configuration file is processed in
    turn.  Returns None/False on early failure paths; per-file failures are
    reported via dla.addError.
    """
    global sourceLayer, targetLayer, _success
    # was dla._errorCount: the counter used everywhere else (preview, stage,
    # setFieldValues) is dla._errCount, so the old assignment never reset it
    dla._errCount = 0
    arcpy.SetProgressor("default", "Data Assistant")
    arcpy.SetProgressorLabel("Data Assistant")
    xmlFiles = xmlFileNames.split(";")
    for xmlFile in xmlFiles:  # multi value parameter, loop for each file
        dla.addMessage("Configuration file: " + xmlFile)
        xmlDoc = dla.getXmlDoc(xmlFile)  # parse the xml document
        if xmlDoc == None:
            return
        svceS = False
        svceT = False
        if sourceLayer == "" or sourceLayer == None:
            sourceLayer = dla.getNodeValue(xmlDoc, "Source")
            svceS = dla.checkLayerIsService(sourceLayer)
        if targetLayer == "" or targetLayer == None:
            targetLayer = dla.getNodeValue(xmlDoc, "Target")
            svceT = dla.checkLayerIsService(targetLayer)
        dla.addMessage(targetLayer)
        ## Added May2016. warn user if capabilities are not correct, exit if not a valid layer
        if not dla.checkServiceCapabilities(sourceLayer, True):
            return False
        if not dla.checkServiceCapabilities(targetLayer, True):
            return False
        if svceS == True or svceT == True:
            # when signed in get the token and use this. Will be requested many times during the publish
            token = dla.getSigninToken()
            if token == None:
                dla.addError("User must be signed in for this tool to work with services")
                return
        expr = getWhereClause(xmlDoc)
        if useReplaceSettings == True and (expr == '' or expr == None):
            dla.addError("There must be an expression for replacing by field value, current value = " + str(expr))
            return False
        dla.setWorkspace()
        targetName = dla.getTargetName(xmlDoc)
        res = dlaExtractLayerToGDB.extract(xmlFile, None, dla.workspace, sourceLayer, targetName)
        if res != True:
            table = dla.getTempTable(targetName)
            msg = "Unable to export data, there is a lock on existing datasets or another unknown error"
            if arcpy.TestSchemaLock(table) != True:
                msg = "Unable to export data, there is a lock on the intermediate feature class: " + table
            dla.addError(msg)
            print(msg)
            return
        else:
            res = dlaFieldCalculator.calculate(xmlFile, dla.workspace, targetName, False)
            if res == True:
                dlaTable = dla.getTempTable(targetName)
                res = doPublish(xmlDoc, dlaTable, targetLayer)
        arcpy.ResetProgressor()
        sourceLayer = None  # set source and target back to None for multiple file processing
        targetLayer = None
        if res == False:
            err = "Data Assistant Update Failed, see messages for details"
            dla.addError(err)
            print(err)
def calculate(xmlFileName, workspace, name, ignore):
    """Add the target fields to the intermediate table and calculate their values.

    xmlFileName -- path to the Data Assistant configuration (xml) file.
    workspace   -- geodatabase workspace containing the intermediate table.
    name        -- dataset name used to locate the temp table via dla.getTempTable.
    ignore      -- when True, row-level calculation failures are ignored and the
                   function still reports success.
    Returns True on success, False on any failure (missing table, schema lock,
    or setFieldValues reporting failure).
    """
    dla.workspace = workspace
    success = True
    arcpy.ClearWorkspaceCache_management(dla.workspace)
    xmlDoc = dla.getXmlDoc(xmlFileName)
    arcpy.env.Workspace = dla.workspace
    table = dla.getTempTable(name)
    if not arcpy.Exists(table):
        dla.addError("Feature Class " + table + " does not exist, exiting")
        arcpy.SetParameter(SUCCESS, False)
        return False  # was a bare return (None); normalize all failure paths to False
    if not arcpy.TestSchemaLock(table):
        dla.addError("Unable to obtain a schema lock for " + table + ", exiting")
        arcpy.SetParameter(SUCCESS, False)
        return False  # was -1; callers only test res == True, False is the consistent failure value
    fields = dla.getXmlElements(xmlFileName, "Field")
    sourceFields = dla.getXmlElements(xmlFileName, "SourceField")
    targetFields = dla.getXmlElements(xmlFileName, "TargetField")
    attrs = [f.name for f in arcpy.ListFields(table)]
    # ensure every mapped target field exists on the intermediate table
    for field in fields:
        arcpy.env.Workspace = dla.workspace
        targetName = dla.getNodeValue(field, "TargetName")
        # default type/length when the field is not declared in TargetField elements
        ftype = "String"  # renamed from 'type' to avoid shadowing the builtin
        length = "50"
        for target in targetFields:
            nm = target.getAttributeNode("Name").nodeValue
            if nm == targetName:
                ftype = target.getAttributeNode("Type").nodeValue
                length = target.getAttributeNode("Length").nodeValue
        # uppercase compare, later need to check for orig/upper name for calc
        dla.addDlaField(table, targetName, field, attrs, ftype, length)
    # build the parallel name/type/length lists for every mapped field
    allFields = sourceFields + targetFields
    names = []
    types = []
    lengths = []
    for field in allFields:
        nm = field.getAttributeNode("Name").nodeValue
        if nm != dla.noneName:
            names.append(nm)
            types.append(field.getAttributeNode("Type").nodeValue)
            lengths.append(field.getAttributeNode("Length").nodeValue)
    retVal = setFieldValues(table, fields, names, types, lengths)
    if retVal == False:
        success = False
    arcpy.ClearWorkspaceCache_management(dla.workspace)
    dla.cleanupGarbage()
    arcpy.ResetProgressor()
    if ignore == True:
        success = True
    return success
def publish(xmlFileNames):
    """Perform the data update processing for one or more configuration files.

    Called from main or from another script.  xmlFileNames is a semicolon
    delimited multi-value parameter; each configuration file is processed in
    turn.  Returns None/False on early failure paths; per-file failures are
    reported via dla.addError.
    """
    global sourceLayer, targetLayer, _success
    # was dla._errorCount: the counter used everywhere else (preview, stage,
    # setFieldValues) is dla._errCount, so the old assignment never reset it
    dla._errCount = 0
    arcpy.SetProgressor("default", "Data Assistant")
    arcpy.SetProgressorLabel("Data Assistant")
    xmlFiles = xmlFileNames.split(";")
    for xmlFile in xmlFiles:  # multi value parameter, loop for each file
        dla.addMessage("Configuration file: " + xmlFile)
        xmlDoc = dla.getXmlDoc(xmlFile)  # parse the xml document
        if xmlDoc == None:
            return
        svceS = False
        svceT = False
        if sourceLayer == "" or sourceLayer == None:
            sourceLayer = dla.getNodeValue(xmlDoc, "Source")
            svceS = dla.checkLayerIsService(sourceLayer)
        if targetLayer == "" or targetLayer == None:
            targetLayer = dla.getNodeValue(xmlDoc, "Target")
            svceT = dla.checkLayerIsService(targetLayer)
        dla.addMessage(targetLayer)
        ## Added May2016. warn user if capabilities are not correct, exit if not a valid layer
        if not dla.checkServiceCapabilities(sourceLayer, True):
            return False
        if not dla.checkServiceCapabilities(targetLayer, True):
            return False
        if svceS == True or svceT == True:
            # when signed in get the token and use this. Will be requested many times during the publish
            token = dla.getSigninToken()
            if token == None:
                dla.addError("User must be signed in for this tool to work with services")
                return
        expr = getWhereClause(xmlDoc)
        if useReplaceSettings == True and (expr == '' or expr == None):
            dla.addError("There must be an expression for replacing by field value, current value = " + str(expr))
            return False
        dla.setWorkspace()
        targetName = dla.getTargetName(xmlDoc)
        res = dlaExtractLayerToGDB.extract(xmlFile, None, dla.workspace, sourceLayer, targetName)
        if res != True:
            table = dla.getTempTable(targetName)
            msg = "Unable to export data, there is a lock on existing datasets or another unknown error"
            if arcpy.TestSchemaLock(table) != True:
                msg = "Unable to export data, there is a lock on the intermediate feature class: " + table
            dla.addError(msg)
            print(msg)
            return
        else:
            res = dlaFieldCalculator.calculate(xmlFile, dla.workspace, targetName, False)
            if res == True:
                dlaTable = dla.getTempTable(targetName)
                res = doPublish(xmlDoc, dlaTable, targetLayer)
        arcpy.ResetProgressor()
        sourceLayer = None  # set source and target back to None for multiple file processing
        targetLayer = None
        if res == False:
            err = "Data Assistant Update Failed, see messages for details"
            dla.addError(err)
            print(err)
def stage(xmlFileNames):
    """Extract each configured source to a staging dataset and calculate fields.

    xmlFileNames is a semicolon delimited list of configuration files.  For
    each file the source dataset is extracted to the workspace, field values
    are calculated, and a layer/table view limited to the target fields is
    created.  The resulting layer names are returned through the _derived
    output parameter, and a Staging element is written back to each config.
    """
    global source, target, rowLimit
    dla.setWorkspace()
    dla._errCount = 0
    outlayers = []
    for xmlFileName in xmlFileNames.split(';'):
        xmlFileName = dla.getXmlDocName(xmlFileName)
        xmlDoc = dla.getXmlDoc(xmlFileName)
        prj = dla.setProject(xmlFileName, dla.getNodeValue(xmlDoc, "Project"))
        if prj == None:
            dla.addError("Unable to open your project, please ensure it is in the same folder as your current project or your Config file")
            # consistent with preview(): do not attempt to stage without a project
            continue
        if rowLimit == "" or rowLimit == None:
            rowLimit = None  # staging always copies all rows
        if source == "" or source == None:
            source = dla.getDatasetPath(xmlDoc, "Source")
        if target == "" or target == None:
            target = dla.getDatasetPath(xmlDoc, "Target")
        # tables and feature classes are extracted with different dataset types
        if dla.isTable(source) or dla.isTable(target):
            datasetType = 'Table'
        else:
            datasetType = 'FeatureClass'
        targetName = dla.getStagingName(source, target)
        targetDS = os.path.join(dla.workspace, targetName)
        res = dlaExtractLayerToGDB.extract(xmlFileName, rowLimit, dla.workspace, source, targetDS, datasetType)
        if res == True:
            res = dlaFieldCalculator.calculate(xmlFileName, dla.workspace, targetName, False)
            if res == True:
                arcpy.env.addOutputsToMap = True
                layer = targetName
                layertmp = targetName + "tmp"
                if arcpy.Exists(layertmp):
                    arcpy.Delete_management(layertmp)
                if dla.isTable(targetDS):
                    arcpy.MakeTableView_management(targetDS, layertmp)
                else:
                    arcpy.MakeFeatureLayer_management(targetDS, layertmp)
                fieldInfo = dla.getLayerVisibility(layertmp, xmlFileName)
                # should make only the target fields visible
                if dla.isTable(targetDS):
                    arcpy.MakeTableView_management(targetDS, layer, None, dla.workspace, fieldInfo)
                else:
                    arcpy.MakeFeatureLayer_management(targetDS, layer, None, dla.workspace, fieldInfo)
                outlayers.append(layer)
                # record the staging step in the config file
                dla.insertStagingElement(xmlDoc)
                try:
                    # was xmlDoc.writexml(open(...)): the file handle was never closed
                    with open(xmlFileName, 'wt', encoding='utf-8') as xmlFile:
                        xmlDoc.writexml(xmlFile)
                    dla.addMessage('Staging element written to config file')
                except Exception:  # narrowed from a bare except; writing stays best-effort
                    dla.addMessage("Unable to write data to xml file")
                xmlDoc.unlink()
        else:
            dla.addError("Failed to Extract data")
            print("Failed to Extract data")
        # reset source and target for multiple file processing, matching publish();
        # otherwise the first file's paths were silently reused for every file
        source = None
        target = None
    if outlayers != []:
        arcpy.SetParameter(_derived, ";".join(outlayers))
    dla.writeFinalMessage("Data Assistant - Stage")