def checkValueMaps(dataset, table, field, fieldName, mapName):
    """Validate a field's unique values against its configured value map.

    Only fields whose Method is "ValueMap" are checked. Returns False after
    logging every unmapped value; True otherwise (including when the field
    uses another method).
    """
    global valueMaps
    method = gzSupport.getNodeValue(field, "Method")
    success = True
    if method == "ValueMap":
        fieldMapName = gzSupport.getNodeValue(field, "ValueMapName")
        otherwise = gzSupport.getNodeValue(field, "ValueMapOtherwise")
        # An "Otherwise" containing more than two spaces is treated as an
        # expression rather than a literal map value — presumably evaluated
        # elsewhere by gzSupport; TODO confirm.
        otherwiseIsExpression = otherwise != None and otherwise.count(" ") > 2
        found = False
        for map in valueMaps:
            mapNodeName = map.getAttributeNode("name").nodeValue
            if mapNodeName == fieldMapName and not found:
                # it is possible for the same value map to be present in multiple
                # gizinta project files, just use the first one.
                found = True
                mapValues = gzSupport.getNodeValue(map, mapName).split(",")
                if otherwise != None and otherwise != '' and otherwise not in mapValues and not otherwiseIsExpression:
                    mapValues.append(otherwise)
                values = gzSupport.getFieldValues("Unique", [fieldName], [dataset])
                uniqueValues = values[0]
                #delta = len(uniqueValues[0]) - len(mapValues)
                mismatch = []
                for uVal in uniqueValues:
                    if uVal not in mapValues:
                        mismatch.append(uVal)
                # BUG FIX: the original called otherwise.count(" ") here without
                # a None check, raising AttributeError whenever ValueMapOtherwise
                # was absent and mismatches existed.
                if len(mismatch) > 0 and not otherwiseIsExpression:
                    gzSupport.addError(str(len(mismatch)) + " mismatches for " + fieldName + ", results located in " + gzSupport.errorTableName)
                    for uVal in mismatch:
                        gzSupport.addError("'" + str(uVal) + "' not found in value map " + str(fieldMapName))
                        gzSupport.logProcessError(table, gzSupport.sourceIDField, "", fieldName, "Mismatched Value Map:" + str(uVal))
                    success = False
                elif len(mismatch) == 0:
                    gzSupport.addMessage("No mismatches found for ValueMaps")
    return success
def main(argv = None):
    """Run dataset-level and field-level QA for every dataset in the project.

    Iterates the module-level `datasets` nodes, runs runDatasetChecks and (when
    enabled) per-field checks, logs each result, and finally reports success
    through the SUCCESS output parameter.
    """
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    tables = gzSupport.listDatasets(gzSupport.workspace)
    tNames = tables[0]
    tFullNames = tables[1]
    if len(datasets) > 0:
        progBar = len(datasets)
        arcpy.SetProgressor("step", "Running QA...", 0, progBar, 1)
    for dataset in datasets:
        arcpy.env.Workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
        table = gzSupport.getFullName(name, tNames, tFullNames)
        #table = os.path.join(gzSupport.workspace,name)
        fields = dataset.getElementsByTagName("Field")
        # BUG FIX: the original reset name to '' here, which blanked the dataset
        # name in the "Running QA" message and in every logDatasetProcess call.
        try:
            # run qa for dataset
            qaRulesDataset = dataset.getAttributeNode("qa").nodeValue
            gzSupport.addMessage("\nRunning QA (" + qaRulesDataset + ") for " + name)
            retVal = runDatasetChecks(dataset, table, qaRulesDataset)
            if retVal == False:
                success = False
            for field in fields:
                sourceQA = False
                targetQA = False
                # default so the logging call below always has a field name
                fieldName = gzSupport.getNodeValue(field, "TargetName")
                if sourceFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    sourceQA = True
                    fieldName = gzSupport.getNodeValue(field, "SourceName")
                if targetFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    targetQA = True
                    fieldName = gzSupport.getNodeValue(field, "TargetName")
                retVal = runFieldCheck(dataset, table, field, sourceQA, targetQA)
                if retVal == False:
                    success = False
                try:
                    gzSupport.logDatasetProcess(name, fieldName, retVal)
                except:
                    gzSupport.addMessage("Process not logged for field")
            arcpy.SetProgressorPosition()
        except:
            gzSupport.showTraceback()
            gzSupport.addError("Field Check Error")
            success = False
            gzSupport.logDatasetProcess("sourceTargetQA", name, False)
        finally:
            arcpy.ResetProgressor()
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log file tools\\log\\sourceTargetQA.log for more information")
        if gzSupport.ignoreErrors == True:
            success = True
    arcpy.SetParameter(SUCCESS, success)
    gzSupport.closeLog()
    return
def checkValueMaps(dataset,table,field,fieldName,mapName):
    """Check a field's unique values against the named value map.

    Returns False (after logging every offending value) when values are present
    that the map does not contain; True otherwise.
    """
    global valueMaps
    method = gzSupport.getNodeValue(field,"Method")
    success = True
    if method == "ValueMap":
        fieldMapName = gzSupport.getNodeValue(field,"ValueMapName")
        otherwise = gzSupport.getNodeValue(field,"ValueMapOtherwise")
        # More than two spaces in Otherwise marks it as an expression rather
        # than a literal value to append to the map — TODO confirm semantics.
        otherwiseIsExpression = otherwise != None and otherwise.count(" ") > 2
        # NOTE(review): every map with a matching name is processed here; a
        # sibling version of this function uses only the first match.
        for map in valueMaps:
            mapNodeName = map.getAttributeNode("name").nodeValue
            if mapNodeName == fieldMapName:
                mapValues = gzSupport.getNodeValue(map,mapName).split(",")
                if otherwise != None and otherwise != '' and otherwise not in mapValues and not otherwiseIsExpression:
                    mapValues.append(otherwise)
                values = gzSupport.getFieldValues("Unique",[fieldName],[dataset])
                uniqueValues = values[0]
                #delta = len(uniqueValues[0]) - len(mapValues)
                mismatch = []
                for uVal in uniqueValues:
                    if uVal not in mapValues:
                        mismatch.append(uVal)
                # BUG FIX: otherwise.count(" ") was previously called here with
                # no None guard and crashed when ValueMapOtherwise was absent.
                if len(mismatch) > 0 and not otherwiseIsExpression:
                    gzSupport.addError(str(len(mismatch)) + " mismatches for " + fieldName + ", results located in " + gzSupport.errorTableName)
                    for uVal in mismatch:
                        gzSupport.addError("'" + str(uVal) + "' not found in value map " + str(fieldMapName))
                        gzSupport.logProcessError(table,gzSupport.sourceIDField,"",fieldName,"Mismatched Value Map:" + str(uVal))
                    success = False
                elif len(mismatch) == 0:
                    gzSupport.addMessage("No mismatches found for ValueMaps")
    return success
def runFieldCheck(dataset,table,field,sourceQA,targetQA):
    """Run source-side and/or target-side QA for one field.

    Returns False if any enabled check fails; True when all enabled checks
    pass (or none are enabled).
    """
    # Cleanup: the original also fetched SourceName/TargetName via
    # gzSupport.getNodeValue into locals that were never used.
    success = True
    if sourceQA == True:
        retVal = runOneFieldCheck(dataset,table,field,"SourceName")
        if retVal == False:
            success = False
    if targetQA == True:
        retVal = runOneFieldCheck(dataset,table,field,"TargetName")
        if retVal == False:
            success = False
    return success
def main(argv = None):
    """Run dataset- and field-level QA for every dataset in the project file,
    logging results and reporting overall success via the SUCCESS parameter."""
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    if len(datasets) > 0:
        progBar = len(datasets)
        arcpy.SetProgressor("step", "Running QA...", 0, progBar, 1)
    for dataset in datasets:
        arcpy.env.Workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
        table = os.path.join(gzSupport.workspace, name)
        fields = dataset.getElementsByTagName("Field")
        try:
            # run qa for dataset
            qaRulesDataset = dataset.getAttributeNode("qa").nodeValue
            gzSupport.addMessage("\nRunning QA (" + qaRulesDataset + ") for " + name)
            retVal = runDatasetChecks(dataset, table, qaRulesDataset)
            if retVal == False:
                success = False
            for field in fields:
                sourceQA = False
                targetQA = False
                # BUG FIX: fieldName was previously assigned only inside the two
                # conditionals below, raising NameError at the logging call when
                # neither source nor target field QA was enabled.
                fieldName = gzSupport.getNodeValue(field, "TargetName")
                if sourceFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    sourceQA = True
                    fieldName = gzSupport.getNodeValue(field, "SourceName")
                if targetFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    targetQA = True
                    fieldName = gzSupport.getNodeValue(field, "TargetName")
                retVal = runFieldCheck(dataset, table, field, sourceQA, targetQA)
                if retVal == False:
                    success = False
                gzSupport.logDatasetProcess(name, fieldName, retVal)
            arcpy.SetProgressorPosition()
        except:
            gzSupport.showTraceback()
            gzSupport.addError("Field Check Error")
            success = False
            gzSupport.logDatasetProcess(name, "", False)
        finally:
            arcpy.ResetProgressor()
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")
        if gzSupport.ignoreErrors == True:
            success = True
    arcpy.SetParameter(SUCCESS, success)
    gzSupport.closeLog()
    return
def runFieldCheck(dataset,table,field,sourceQA,targetQA):
    """Run source-side and/or target-side QA for one field; returns False when
    any enabled check fails."""
    # BUG FIX (minor): SourceName/TargetName were fetched unconditionally into
    # unused locals; removed, so a field lacking one of the tags can no longer
    # fail here before any check runs.
    success = True
    if sourceQA == True:
        retVal = runOneFieldCheck(dataset,table,field,"SourceName")
        if retVal == False:
            success = False
    if targetQA == True:
        retVal = runOneFieldCheck(dataset,table,field,"TargetName")
        if retVal == False:
            success = False
    return success
def runOneFieldCheck(dataset,table,field,fieldTag):
    # Run every QA rule named in the field element's "qa" attribute for one
    # side (SourceName or TargetName) of a field. Returns False when any of
    # the Unique / Required / ValueMaps rules fails; the "Check" rule is
    # report-only (see note below).
    success = True
    fieldVal = field.getElementsByTagName(fieldTag)[0]
    qaRulesField = fieldVal.getAttributeNode("qa").nodeValue
    fieldName = gzSupport.getNodeValue(field,fieldTag)
    gzSupport.addMessage("Field QA (" + qaRulesField + ")for " + fieldName)
    # Source fields validate against SourceValues, target fields against
    # TargetValues in the value-map definitions.
    if fieldTag == "SourceName":
        mapName = "SourceValues"
    else:
        mapName = "TargetValues"
    if qaRulesField.find("Unique") > -1:
        retVal = findDuplicates(dataset,table,fieldName)
        if retVal == False:
            success = False
            gzSupport.logProcessError(table,"FieldName",fieldName,"","Duplicate values found")
    # Required is only evaluated when Unique is absent — presumably because a
    # unique field is implicitly required; confirm against project docs.
    if qaRulesField.find("Required") > -1 and qaRulesField.find("Unique") == -1:
        retVal = getCountNullBlank(table,fieldName,"")
        if retVal == False:
            success = False
            gzSupport.logProcessError(table,"FieldName",fieldName,"","Null or blank values found")
    if qaRulesField.find("ValueMaps") > -1:
        retVal = checkValueMaps(dataset,table,field,fieldName,mapName)
        if retVal == False:
            success = False
            gzSupport.logProcessError(table,"FieldName",fieldName,"","Values found that do not match ValueMaps")
    if qaRulesField.find("Check") > -1:
        # NOTE(review): this branch runs the checks but overwrites retVal and
        # never updates success — apparently a report-only mode where failures
        # do not affect the return value. Confirm this is intentional.
        retVal = getCountNullBlank(table,fieldName,"")
        #retVal = findDuplicates(dataset,table,fieldName)
        retVal = checkValueMaps(dataset,table,field,fieldName,mapName)
    return success
def exportDataset(sourceLayer, targetName, dataset):
    """Copy the features of sourceLayer — optionally filtered by the dataset's
    WhereClause node — into a new feature class named targetName inside the
    gzSupport workspace. Returns True on success, False after logging a
    failure."""
    succeeded = True
    targetTable = os.path.join(gzSupport.workspace, targetName)
    gzSupport.addMessage("Exporting Layer from " + sourceLayer)
    filterExpr = ""
    try:
        # A dataset without a WhereClause node simply means "no filter".
        try:
            filterExpr = gzSupport.getNodeValue(dataset, "WhereClause")
        except:
            filterExpr = ''
        gzSupport.addMessage("Where '" + filterExpr + "'")
        # Base name = path with directory prefix and ".lyr" extension stripped.
        start = sourceLayer.rfind(os.sep) + 1
        stop = sourceLayer.lower().rfind(".lyr")
        baseName = sourceLayer[start:stop]
        fieldNodes = xmlDoc.getElementsByTagName("Field")
        layerView = gzSupport.makeFeatureViewForLayer(gzSupport.workspace, sourceLayer, baseName + "_View", filterExpr, fieldNodes)
        rowTotal = arcpy.GetCount_management(layerView).getOutput(0)
        gzSupport.addMessage(str(rowTotal) + " source rows")
        arcpy.FeatureClassToFeatureClass_conversion(layerView, gzSupport.workspace, targetName)
    except:
        failMsg = "Failed to create new dataset " + targetName
        gzSupport.showTraceback()
        gzSupport.addError(failMsg)
        gzSupport.logProcessError(sourceLayer, gzSupport.sourceIDField, sourceLayer, targetName, failMsg)
        succeeded = False
    return succeeded
def exportDataset(sourceLayer, targetName, dataset):
    # Export the source layer (optionally filtered by the dataset's
    # WhereClause node) to a new feature class named targetName in the
    # gzSupport workspace. Returns True on success, False after logging
    # a failure.
    result = True
    targetTable = os.path.join(gzSupport.workspace, targetName)
    gzSupport.addMessage("Exporting Layer from " + sourceLayer)
    whereClause = ""
    try:
        # a missing WhereClause node simply means "no filter"
        try:
            whereClause = gzSupport.getNodeValue(dataset, "WhereClause")
        except:
            whereClause = ""
        gzSupport.addMessage("Where '" + whereClause + "'")
        # strip the directory prefix and the ".lyr" extension from the path
        sourceName = sourceLayer[sourceLayer.rfind(os.sep) + 1 : sourceLayer.lower().rfind(".lyr")]
        viewName = sourceName + "_View"
        xmlFields = xmlDoc.getElementsByTagName("Field")
        view = gzSupport.makeFeatureViewForLayer(gzSupport.workspace, sourceLayer, viewName, whereClause, xmlFields)
        count = arcpy.GetCount_management(view).getOutput(0)
        gzSupport.addMessage(str(count) + " source rows")
        arcpy.FeatureClassToFeatureClass_conversion(view, gzSupport.workspace, targetName)
    except:
        err = "Failed to create new dataset " + targetName
        gzSupport.showTraceback()
        gzSupport.addError(err)
        gzSupport.logProcessError(sourceLayer, gzSupport.sourceIDField, sourceLayer, targetName, err)
        result = False
    return result
def main(argv=None):
    # Field-calculator entry point: for each configured dataset, add any
    # missing gizinta fields to the target table, then compute field values
    # via setFieldValues. Logs per-dataset results and reports overall
    # success through the SUCCESS output parameter.
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    tables = gzSupport.listDatasets(gzSupport.workspace)
    tNames = tables[0]
    tFullNames = tables[1]
    name = ''
    for dataset in datasets:
        arcpy.env.Workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        table = gzSupport.getFullName(name, tNames, tFullNames)
        gzSupport.sourceIDField = dataset.getAttributeNode(
            "sourceIDField").nodeValue
        gzSupport.sourceNameField = dataset.getAttributeNode(
            "sourceNameField").nodeValue
        if not arcpy.Exists(table):
            gzSupport.addError("Feature Class " + table + " does not exist, exiting")
            arcpy.SetParameter(SUCCESS, False)
            return
        if not arcpy.TestSchemaLock(table):
            gzSupport.addError("Unable to obtain a schema lock for " + table + ", exiting")
            arcpy.SetParameter(SUCCESS, False)
            # NOTE(review): returns -1 here but a bare return (None) above
            return -1
        # NOTE(review): desc appears unused — confirm before removing
        desc = arcpy.Describe(table)
        fields = dataset.getElementsByTagName("Field")
        try:
            attrs = [f.name for f in arcpy.ListFields(table)]
            # make sure every configured target field exists on the table
            for field in fields:
                arcpy.env.Workspace = gzSupport.workspace
                targetName = gzSupport.getNodeValue(field, "TargetName")
                gzSupport.addGizintaField(table, targetName, field, attrs)
            retVal = setFieldValues(table, fields)
            if retVal == False:
                success = False
            gzSupport.logDatasetProcess(name, "Fields", retVal)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
            gzSupport.cleanupGarbage()
        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("fieldCalculator", name, False)
        finally:
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError(
            "Errors occurred during process, look in log file tools\\log\\fieldCalculator.log for more information"
        )
        # optionally downgrade failures to success when configured to do so
        if gzSupport.ignoreErrors == True:
            success = True
    arcpy.SetParameter(SUCCESS, success)
    arcpy.ResetProgressor()
    gzSupport.closeLog()
    return
def importLayer(cadPath, cadName, dataset):
    """Append the features of one CAD layer into its target feature class.

    The target name comes from the dataset's targetName attribute when
    present, otherwise from name. Applies the dataset's WhereClause, joins
    CSV attributes when configured, and appends only when features exist.
    Returns True on success, False after logging any error.
    """
    result = False
    # targetName overrides name when the attribute is present
    try:
        name = dataset.getAttributeNode("targetName").nodeValue
    except:
        name = dataset.getAttributeNode("name").nodeValue
    table = os.path.join(gzSupport.workspace, name)
    layerName = dataset.getAttributeNode("sourceName").nodeValue
    layer = os.path.join(cadPath, cadName, layerName)
    gzSupport.addMessage("Importing Layer " + layer)
    try:
        whereClause = gzSupport.getNodeValue(dataset, "WhereClause")
        xmlFields = dataset.getElementsByTagName("Field")
        gzSupport.addMessage("Where " + whereClause)
        if not arcpy.Exists(table):
            err = "Feature Class " + name + " does not exist"
            gzSupport.addError(err)
            gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name, name, err)
            return False
        if whereClause != '':
            view = gzSupport.makeFeatureView(gzSupport.workspace, layer, layerName + "_View", whereClause, xmlFields)
        else:
            view = layer
        # BUG FIX: getOutput(0) returns the count as a *string*; the original
        # compared that string to 0 ("count > 0"), which is always true on
        # Python 2 (even for "0") and a TypeError on Python 3. Convert once.
        count = int(arcpy.GetCount_management(view).getOutput(0))
        gzSupport.addMessage(str(count) + " source Features for " + name)
        if hasJoinTo(dataset) == True:
            res = joinToCsv(view, dataset, cadPath, cadName)
            result = res[0]
            view = res[1]
        else:
            result = True
        if result == True and count > 0:
            arcpy.Append_management([view], table, "NO_TEST", "", "")
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    except:
        err = "Failed to import layer " + name
        gzSupport.addError(err)
        gzSupport.showTraceback()
        gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name, layerName, err)
    gzSupport.cleanupGarbage()
    # view may be unbound if an exception occurred before it was created
    try:
        del view
    except:
        gzSupport.addMessage("")
    return result
def main(argv = None):
    # Field-calculator entry point: for each configured dataset, add any
    # missing gizinta fields to the target table, then compute field values
    # via setFieldValues. Logs per-dataset results and reports overall
    # success through the SUCCESS output parameter.
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    tables = gzSupport.listDatasets(gzSupport.workspace)
    tNames = tables[0]
    tFullNames = tables[1]
    name = ''
    for dataset in datasets:
        arcpy.env.Workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        table = gzSupport.getFullName(name,tNames,tFullNames)
        gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
        gzSupport.sourceNameField = dataset.getAttributeNode("sourceNameField").nodeValue
        if not arcpy.Exists(table):
            gzSupport.addError("Feature Class " + table + " does not exist, exiting")
            arcpy.SetParameter(SUCCESS, False)
            return
        if not arcpy.TestSchemaLock(table):
            gzSupport.addError("Unable to obtain a schema lock for " + table + ", exiting")
            arcpy.SetParameter(SUCCESS, False)
            # NOTE(review): returns -1 here but a bare return (None) above
            return -1
        # NOTE(review): desc appears unused — confirm before removing
        desc = arcpy.Describe(table)
        fields = dataset.getElementsByTagName("Field")
        try:
            attrs = [f.name for f in arcpy.ListFields(table)]
            # make sure every configured target field exists on the table
            for field in fields:
                arcpy.env.Workspace = gzSupport.workspace
                targetName = gzSupport.getNodeValue(field,"TargetName")
                gzSupport.addGizintaField(table,targetName,field,attrs)
            retVal = setFieldValues(table,fields)
            if retVal == False:
                success = False
            gzSupport.logDatasetProcess(name,"Fields",retVal)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
            gzSupport.cleanupGarbage()
        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("fieldCalculator",name,False)
        finally:
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log file tools\\log\\fieldCalculator.log for more information")
        # optionally downgrade failures to success when configured to do so
        if gzSupport.ignoreErrors == True:
            success = True
    arcpy.SetParameter(SUCCESS, success)
    arcpy.ResetProgressor()
    gzSupport.closeLog()
    return
def importLayer(cadPath, cadName, dataset):
    """Append the features of one CAD layer into its target feature class.

    The target name comes from the dataset's targetName attribute when
    present, otherwise from name. Applies the dataset's WhereClause, joins
    CSV attributes when configured, and appends only when features exist.
    Returns True on success, False after logging any error.
    """
    result = False
    # targetName overrides name when the attribute is present
    try:
        name = dataset.getAttributeNode("targetName").nodeValue
    except:
        name = dataset.getAttributeNode("name").nodeValue
    table = os.path.join(gzSupport.workspace, name)
    layerName = dataset.getAttributeNode("sourceName").nodeValue
    layer = os.path.join(cadPath, cadName, layerName)
    gzSupport.addMessage("Importing Layer " + layer)
    try:
        whereClause = gzSupport.getNodeValue(dataset, "WhereClause")
        xmlFields = dataset.getElementsByTagName("Field")
        gzSupport.addMessage("Where " + whereClause)
        if not arcpy.Exists(table):
            err = "Feature Class " + name + " does not exist"
            gzSupport.addError(err)
            gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name, name, err)
            return False
        if whereClause != "":
            view = gzSupport.makeFeatureView(gzSupport.workspace, layer, layerName + "_View", whereClause, xmlFields)
        else:
            view = layer
        # BUG FIX: getOutput(0) returns the count as a *string*; the original
        # compared that string to 0 ("count > 0"), which is always true on
        # Python 2 (even for "0") and a TypeError on Python 3. Convert once.
        count = int(arcpy.GetCount_management(view).getOutput(0))
        gzSupport.addMessage(str(count) + " source Features for " + name)
        if hasJoinTo(dataset) == True:
            res = joinToCsv(view, dataset, cadPath, cadName)
            result = res[0]
            view = res[1]
        else:
            result = True
        if result == True and count > 0:
            arcpy.Append_management([view], table, "NO_TEST", "", "")
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    except:
        err = "Failed to import layer " + name
        gzSupport.addError(err)
        gzSupport.showTraceback()
        gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name, layerName, err)
    gzSupport.cleanupGarbage()
    # view may be unbound if an exception occurred before it was created
    try:
        del view
    except:
        gzSupport.addMessage("")
    return result
success = True errCount = 0 while row: if errCount > gzSupport.maxErrorCount: return False i += 1 if i % 1000 == 0: gzSupport.addMessage("Feature " + str(i) + " processed") if i % progressUpdate == 0: arcpy.SetProgressorPosition(i) gzSupport.addMessage("Processing feature " + str(i)) for field in fields: method = "None" currentValue = "None" targetName = gzSupport.getNodeValue(field,"TargetName") try: sourceName = gzSupport.getNodeValue(field,"SourceName")# handle the case where the source field does not exist or is blank except: sourceName = "" if sourceName != "" and not sourceName.startswith("*"): try: currentValue = row.getValue(sourceName) except: #gzSupport.addMessage("No value for " + sourceName) currentValue = "None" # handle the case where the source field does not exist or is blank method = gzSupport.getNodeValue(field,"Method") if (method == "None" or method == "Copy") and currentValue == "None": method = "None"
targetWorkspace = arcpy.GetParameterAsText(3) # Connection to the version to be used/updated gzSupport.ignoreErrors = gzSupport.strToBool(arcpy.GetParameterAsText(4)) # boolean indicates whether to return False if errors encountered SUCCESS = 5 # parameter number for output success value if targetWorkspace == "" or targetWorkspace == "#": targetWorkspace = defaultWorkspace gzSupport.startLog() xmlDoc = xml.dom.minidom.parse(gzSupport.xmlFileName) datasets = gzSupport.getXmlElements(xmlDoc,"Dataset") rootElem = gzSupport.getRootElement(xmlDoc) gzSupport.logTableName = rootElem.getAttributeNode("logTableName").nodeValue gzSupport.errorTableName = rootElem.getAttributeNode("errorTableName").nodeValue settings = gzSupport.getXmlElements(xmlDoc,"AppendSettings")[0] fieldNames = gzSupport.getNodeValue(settings,"FieldNames") fieldNames = fieldNames.split(",") try: versionName = gzSupport.getNodeValue(settings,"VersionName") defaultVersionName = gzSupport.getNodeValue(settings,"DefaultVersionName") except: versionName = None defaultVersionName = None def main(argv = None): global targetWorkspace hasVersion = False desc = arcpy.Describe(gzSupport.workspace) if desc.workspaceType != "RemoteDatabase" and versionName == None: targetWorkspace = defaultWorkspace
arcpy.GetParameterAsText(4) ) # boolean indicates whether to return False if errors encountered SUCCESS = 5 # parameter number for output success value if targetWorkspace == "" or targetWorkspace == "#": targetWorkspace = defaultWorkspace gzSupport.startLog() xmlDoc = xml.dom.minidom.parse(gzSupport.xmlFileName) datasets = gzSupport.getXmlElements(gzSupport.xmlFileName, "Dataset") rootElem = gzSupport.getRootElement(xmlDoc) gzSupport.logTableName = rootElem.getAttributeNode("logTableName").nodeValue gzSupport.errorTableName = rootElem.getAttributeNode("errorTableName").nodeValue settings = gzSupport.getXmlElements(gzSupport.xmlFileName, "AppendSettings")[0] fieldNames = gzSupport.getNodeValue(settings, "FieldNames") fieldNames = fieldNames.split(",") try: versionName = gzSupport.getNodeValue(settings, "VersionName") defaultVersionName = gzSupport.getNodeValue(settings, "DefaultVersionName") except: versionName = None defaultVersionName = None def main(argv=None): global targetWorkspace hasVersion = False desc = arcpy.Describe(gzSupport.workspace) if desc.workspaceType != "RemoteDatabase" and versionName == None:
success = True errCount = 0 while row: if errCount > gzSupport.maxErrorCount: return False i += 1 if i % 1000 == 0: gzSupport.addMessage("Feature " + str(i) + " processed") if i % progressUpdate == 0: arcpy.SetProgressorPosition(i) gzSupport.addMessage("Processing feature " + str(i)) for field in fields: method = "None" currentValue = "None" targetName = gzSupport.getNodeValue(field, "TargetName") try: sourceName = gzSupport.getNodeValue( field, "SourceName" ) # handle the case where the source field does not exist or is blank except: sourceName = "" if sourceName != "" and not sourceName.startswith("*"): try: if sourceName != targetName and sourceName.upper( ) == targetName.upper(): # special case for same name but different case - should already have the target name from extract functions currentValue = row.getValue(targetName) else: currentValue = row.getValue(sourceName)