def main(argv=None):
    # Extract each configured source layer into the gizinta file geodatabase.
    # NOTE(review): relies on module globals defined elsewhere in this file
    # (gzSupport, datasets, sourceLayer, SUCCESS) -- confirm sourceLayer is
    # bound before this runs.
    success = True
    name = ''
    try:
        # create the workspace if missing, otherwise compact it first
        if not arcpy.Exists(gzSupport.workspace):
            gzSupport.addMessage(gzSupport.workspace + " does not exist, attempting to create")
            gzSupport.createGizintaGeodatabase()
        else:
            gzSupport.compressGDB(gzSupport.workspace)
        if len(datasets) > 0:
            progBar = len(datasets) + 1
            arcpy.SetProgressor("step", "Importing Layers...", 0, progBar, 1)
            arcpy.SetProgressorPosition()
            for dataset in datasets:
                gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
                sourceName = dataset.getAttributeNode("sourceName").nodeValue
                targetName = dataset.getAttributeNode("targetName").nodeValue
                arcpy.SetProgressorLabel("Loading " + sourceName + " to " + targetName + "...")
                if not arcpy.Exists(sourceLayer):
                    gzSupport.addError("Layer " + sourceLayer + " does not exist, exiting")
                    return
                target = os.path.join(gzSupport.workspace, targetName)
                arcpy.env.Workspace = gzSupport.workspace
                # delete any existing target; exportDataset recreates it
                if not arcpy.Exists(target):
                    gzSupport.addMessage("Feature Class " + target + " does not exist")
                else:
                    arcpy.Delete_management(target)
                try:
                    retVal = exportDataset(sourceLayer, targetName, dataset)
                    if retVal == False:
                        success = False
                except:
                    gzSupport.showTraceback()
                    success = False
                    retVal = False
                gzSupport.logDatasetProcess(sourceName, targetName, retVal)
                arcpy.SetProgressorPosition()
    except:
        gzSupport.addError("A Fatal Error occurred")
        gzSupport.showTraceback()
        success = False
        gzSupport.logDatasetProcess("extractLayerToGDB", name, False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
        if success == False:
            gzSupport.addError("Errors occurred during process, look in log files for more information")
            if gzSupport.ignoreErrors == True:
                success = True
        gzSupport.closeLog()
        arcpy.SetParameter(SUCCESS, success)
def checkValueMaps(dataset, table, field, fieldName, mapName):
    """Validate that every unique value of fieldName in dataset appears in the
    configured ValueMap; log each mismatch. Returns False when mismatches are
    found, True otherwise (including when the field's Method is not ValueMap)."""
    global valueMaps
    method = gzSupport.getNodeValue(field, "Method")
    success = True
    if method == "ValueMap":
        fieldMapName = gzSupport.getNodeValue(field, "ValueMapName")
        otherwise = gzSupport.getNodeValue(field, "ValueMapOtherwise")
        # an "otherwise" with more than 2 spaces is treated as an expression
        # rather than a literal allowed value
        otherwiseIsExpr = otherwise != None and otherwise.count(" ") > 2
        for map in valueMaps:
            mapNodeName = map.getAttributeNode("name").nodeValue
            if mapNodeName == fieldMapName:
                mapValues = gzSupport.getNodeValue(map, mapName).split(",")
                if otherwise != None and otherwise != '' and otherwise not in mapValues and not otherwiseIsExpr:
                    mapValues.append(otherwise)
                values = gzSupport.getFieldValues("Unique", [fieldName], [dataset])
                uniqueValues = values[0]
                mismatch = [uVal for uVal in uniqueValues if uVal not in mapValues]
                # BUG FIX: original evaluated otherwise.count(" ") without a
                # None guard here, raising AttributeError when the
                # ValueMapOtherwise node was absent
                if len(mismatch) > 0 and not otherwiseIsExpr:
                    gzSupport.addError(str(len(mismatch)) + " mismatches for " + fieldName + ", results located in " + gzSupport.errorTableName)
                    for uVal in mismatch:
                        gzSupport.addError("'" + str(uVal) + "' not found in value map " + str(fieldMapName))
                        gzSupport.logProcessError(table, gzSupport.sourceIDField, "", fieldName, "Mismatched Value Map:" + str(uVal))
                    success = False
                elif len(mismatch) == 0:
                    gzSupport.addMessage("No mismatches found for ValueMaps")
    return success
def getDocument(dataset):
    # Build a gizinta xml <table> document describing the fields of the given
    # dataset; returns the document as an xml string, or "" on failure.
    gzSupport.addMessage(dataset)
    desc = arcpy.Describe(dataset)
    xmlDoc = Document()
    root = xmlDoc.createElement('table')
    xmlDoc.appendChild(root)
    root.setAttribute("xmlns", 'http://gizinta.com')
    # strip any database/owner prefix (e.g. "owner.name" -> "name")
    if desc.baseName.find('.') > -1:
        baseName = desc.baseName[desc.baseName.rfind('.') + 1:]
    else:
        baseName = desc.baseName
    source = xmlDoc.createElement("data")
    source.setAttribute("name", baseName)
    root.appendChild(source)
    fields = getFields(dataset)
    i = 0
    try:
        # one <row> element per field, with default QA/method settings
        for field in fields:
            fNode = xmlDoc.createElement("row")
            fNode.setAttribute("id", str(i))
            source.appendChild(fNode)
            addFieldElement(xmlDoc, fNode, "FieldName", field.name)
            addFieldElement(xmlDoc, fNode, "SourceField", "")
            addFieldElement(xmlDoc, fNode, "SourceQA", "Required")  # need to get these values from template project.
            addFieldElement(xmlDoc, fNode, "TargetQA", "Required")
            addFieldElement(xmlDoc, fNode, "SourceMethod", "Copy")
            addFieldElement(xmlDoc, fNode, "FieldType", field.type)
            addFieldElement(xmlDoc, fNode, "FieldLength", str(field.length))
            i += 1
        xmlStr = xmlDoc.toxml()
    except:
        gzSupport.showTraceback()
        xmlStr = ""
    return xmlStr
def main(argv=None):
    # main function - list the datasets and delete rows
    success = True
    # ensure "name" is bound for the except-branch log call even when
    # listDatasets itself raises
    name = ''
    try:
        names = gzSupport.listDatasets(sourceGDB)
        tNames = names[0]
        tFullNames = names[1]
        arcpy.SetProgressor("Step", "Deleting rows...", 0, len(tFullNames), 1)
        i = 0
        for name in tFullNames:
            arcpy.SetProgressorPosition(i)
            arcpy.SetProgressorLabel(" Deleting rows in " + name + "...")
            # for each full name
            if len(datasetNames) == 0 or tNames[i].upper() in datasetNames:
                retVal = doTruncate(name)
                gzSupport.logDatasetProcess(name, "deleteRowsGDB", retVal)
                if retVal == False:
                    success = False
            else:
                gzSupport.addMessage("Skipping " + tNames[i])
            # BUG FIX: original did "i += i", leaving i stuck at 0 so every
            # iteration checked tNames[0] and the progressor never advanced
            i += 1
    except:
        gzSupport.showTraceback()
        # BUG FIX: original referenced undefined name "pymsg" here, masking
        # the real error with a NameError
        gzSupport.addError("Failed to delete rows")
        success = False
        gzSupport.logDatasetProcess(name, "deleteRowsGDB", success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(sourceGDB)
def checkValueMaps(dataset, table, field, fieldName, mapName):
    """Validate unique values of fieldName against the configured ValueMap
    (only the first map with a matching name is used); log mismatches and
    return False when any are found."""
    global valueMaps
    method = gzSupport.getNodeValue(field, "Method")
    success = True
    if method == "ValueMap":
        fieldMapName = gzSupport.getNodeValue(field, "ValueMapName")
        otherwise = gzSupport.getNodeValue(field, "ValueMapOtherwise")
        # an "otherwise" with more than 2 spaces is treated as an expression
        # rather than a literal allowed value
        otherwiseIsExpr = otherwise != None and otherwise.count(" ") > 2
        found = False
        for map in valueMaps:
            mapNodeName = map.getAttributeNode("name").nodeValue
            if mapNodeName == fieldMapName and not found:
                found = True # it is possible for the same value map to be present in multiple gizinta project files, just use the first one.
                mapValues = gzSupport.getNodeValue(map, mapName).split(",")
                if otherwise != None and otherwise != '' and otherwise not in mapValues and not otherwiseIsExpr:
                    mapValues.append(otherwise)
                values = gzSupport.getFieldValues("Unique", [fieldName], [dataset])
                uniqueValues = values[0]
                mismatch = [uVal for uVal in uniqueValues if uVal not in mapValues]
                # BUG FIX: original evaluated otherwise.count(" ") without a
                # None guard here, raising AttributeError when the
                # ValueMapOtherwise node was absent
                if len(mismatch) > 0 and not otherwiseIsExpr:
                    gzSupport.addError(str(len(mismatch)) + " mismatches for " + fieldName + ", results located in " + gzSupport.errorTableName)
                    for uVal in mismatch:
                        gzSupport.addError("'" + str(uVal) + "' not found in value map " + str(fieldMapName))
                        gzSupport.logProcessError(table, gzSupport.sourceIDField, "", fieldName, "Mismatched Value Map:" + str(uVal))
                    success = False
                elif len(mismatch) == 0:
                    gzSupport.addMessage("No mismatches found for ValueMaps")
    return success
def main(argv = None):
    # main function - list the datasets and delete rows
    success = True
    # ensure "name" is bound for the except-branch log call even when
    # listDatasets itself raises
    name = ''
    try:
        names = gzSupport.listDatasets(sourceGDB)
        tNames = names[0]
        tFullNames = names[1]
        arcpy.SetProgressor("Step","Deleting rows...",0,len(tFullNames),1)
        i = 0
        for name in tFullNames:
            arcpy.SetProgressorPosition(i)
            arcpy.SetProgressorLabel(" Deleting rows in " + name + "...")
            # for each full name
            if len(datasetNames) == 0 or tNames[i].upper() in datasetNames:
                retVal = doTruncate(name)
                gzSupport.logDatasetProcess(name,"deleteRowsGDB",retVal)
                if retVal == False:
                    success = False
            else:
                gzSupport.addMessage("Skipping " + tNames[i])
            # BUG FIX: original did "i += i", leaving i stuck at 0 so every
            # iteration checked tNames[0] and the progressor never advanced
            i += 1
    except:
        gzSupport.showTraceback()
        # BUG FIX: original referenced undefined name "pymsg" here, masking
        # the real error with a NameError
        gzSupport.addError("Failed to delete rows")
        success = False
        gzSupport.logDatasetProcess(name,"deleteRowsGDB",success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(sourceGDB)
def main(argv = None):
    # Run dataset-level and field-level QA rules for each configured dataset.
    # Relies on module globals: gzSupport, datasets, sourceFieldQA,
    # targetFieldQA, SUCCESS.
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    tables = gzSupport.listDatasets(gzSupport.workspace)
    tNames = tables[0]
    tFullNames = tables[1]
    if len(datasets) > 0:
        progBar = len(datasets)
        arcpy.SetProgressor("step", "Running QA...", 0, progBar, 1)
    for dataset in datasets:
        arcpy.env.Workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
        table = gzSupport.getFullName(name, tNames, tFullNames)
        #table = os.path.join(gzSupport.workspace,name)
        fields = dataset.getElementsByTagName("Field")
        # NOTE(review): name is cleared here, so the "Running QA ... for"
        # message below prints an empty dataset name -- confirm intended
        name = ''
        try:
            # run qa for dataset
            qaRulesDataset = dataset.getAttributeNode("qa").nodeValue
            gzSupport.addMessage("\nRunning QA (" + qaRulesDataset + ") for " + name)
            retVal = runDatasetChecks(dataset, table, qaRulesDataset)
            if retVal == False:
                success = False
            for field in fields:
                sourceQA = False
                targetQA = False
                # default to the target field name; overridden below by QA flags
                fieldName = gzSupport.getNodeValue(field, "TargetName")
                if sourceFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    sourceQA = True
                    fieldName = gzSupport.getNodeValue(field, "SourceName")
                if targetFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    targetQA = True
                    fieldName = gzSupport.getNodeValue(field, "TargetName")
                retVal = runFieldCheck(dataset, table, field, sourceQA, targetQA)
                if retVal == False:
                    success = False
                try:
                    gzSupport.logDatasetProcess(name, fieldName, retVal)
                except:
                    gzSupport.addMessage("Process not logged for field")
            arcpy.SetProgressorPosition()
        except:
            gzSupport.showTraceback()
            gzSupport.addError("Field Check Error")
            success = False
            gzSupport.logDatasetProcess("sourceTargetQA", name, False)
        finally:
            arcpy.ResetProgressor()
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log file tools\\log\\sourceTargetQA.log for more information")
        if gzSupport.ignoreErrors == True:
            success = True
    arcpy.SetParameter(SUCCESS, success)
    gzSupport.closeLog()
    return
def runOneFieldCheck(dataset, table, field, fieldTag):
    # Run the per-field QA rules named in the field node's "qa" attribute
    # (Unique, Required, ValueMaps, Check). Returns False when any rule fails.
    success = True
    fieldVal = field.getElementsByTagName(fieldTag)[0]
    qaRulesField = fieldVal.getAttributeNode("qa").nodeValue
    fieldName = gzSupport.getNodeValue(field, fieldTag)
    gzSupport.addMessage("Field QA (" + qaRulesField + ")for " + fieldName)
    # value-map node name depends on whether we check the source or target side
    if fieldTag == "SourceName":
        mapName = "SourceValues"
    else:
        mapName = "TargetValues"
    if qaRulesField.find("Unique") > -1:
        retVal = findDuplicates(dataset, table, fieldName)
        if retVal == False:
            success = False
            gzSupport.logProcessError(table, "FieldName", fieldName, "", "Duplicate values found")
    # Required implies a null check, but Unique already covers it
    if qaRulesField.find("Required") > -1 and qaRulesField.find("Unique") == -1:
        retVal = getCountNullBlank(table, fieldName, "")
        if retVal == False:
            success = False
            gzSupport.logProcessError(table, "FieldName", fieldName, "", "Null or blank values found")
    if qaRulesField.find("ValueMaps") > -1:
        retVal = checkValueMaps(dataset, table, field, fieldName, mapName)
        if retVal == False:
            success = False
            gzSupport.logProcessError(table, "FieldName", fieldName, "", "Values found that do not match ValueMaps")
    if qaRulesField.find("Check") > -1:
        # NOTE(review): the results of these "Check" rules are discarded --
        # retVal is overwritten and never folded into success. Confirm whether
        # "Check" is intended to be advisory-only.
        retVal = getCountNullBlank(table, fieldName, "")
        #retVal = findDuplicates(dataset,table,fieldName)
        retVal = checkValueMaps(dataset, table, field, fieldName, mapName)
    return success
def main(argv = None):
    # main function - list the datasets and delete rows
    success = True
    name = ''
    gzSupport.workspace = sourceGDB
    try:
        # use the explicit dataset list when provided, otherwise everything
        # in the source geodatabase
        if len(datasetNames) == 0:
            names = gzSupport.listDatasets(sourceGDB)
            tNames = names[0]
        else:
            tNames = datasetNames
        arcpy.SetProgressor("Step", "Deleting rows...", 0, len(tNames), 1)
        i = 0
        for name in tNames:
            arcpy.SetProgressorPosition(i)
            arcpy.SetProgressorLabel(" Deleting rows in " + name + "...")
            # for each full name
            if len(datasetNames) == 0 or gzSupport.nameTrimmer(name.upper()) in datasetNames:
                retVal = doTruncate(os.path.join(sourceGDB, name))
                gzSupport.logDatasetProcess("deleteRowsGDB", name, retVal)
                if retVal == False:
                    success = False
            else:
                gzSupport.addMessage("Skipping " + gzSupport.nameTrimmer(name))
            # BUG FIX: original did "i = i + i", leaving i stuck at 0 so the
            # progressor never advanced
            i = i + 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Failed to delete rows")
        success = False
        gzSupport.logDatasetProcess("deleteRowsGDB", name, success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(sourceGDB)
def main(argv = None):
    # Log the start time, open the field-mapping page for the configured
    # gizinta xml file, then close the log.
    gzSupport.addMessage(gzSupport.getDBTime())
    success = True
    OpenBrowserURL(gzSupport.xmlFileName)
    gzSupport.closeLog()
    return
def main(argv = None):
    # Load each configured dataset from sourceWorkspace into the gizinta
    # geodatabase: append ("import") when the target exists, otherwise create
    # it ("export"). Relies on module globals: gzSupport, datasets,
    # sourceWorkspace, SUCCESS.
    success = True
    try:
        if not arcpy.Exists(gzSupport.workspace):
            gzSupport.addMessage(gzSupport.workspace + " does not exist, attempting to create")
            gzSupport.createGizintaGeodatabase()
        else:
            gzSupport.compressGDB(gzSupport.workspace)
        if len(datasets) > 0:
            progBar = len(datasets) + 1
            arcpy.SetProgressor("step", "Importing Datasets...", 0, progBar, 1)
            # clear target rows up front so appends start from empty tables
            deleteExistingRows(datasets)
            arcpy.SetProgressorPosition()
            for dataset in datasets:
                gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
                sourceName = dataset.getAttributeNode("sourceName").nodeValue
                targetName = dataset.getAttributeNode("targetName").nodeValue
                arcpy.SetProgressorLabel("Loading " + sourceName + " to " + targetName + "...")
                if not arcpy.Exists(os.path.join(sourceWorkspace, sourceName)):
                    # NOTE(review): the message text is joined INSIDE
                    # os.path.join, producing an odd path-like string --
                    # confirm intended
                    gzSupport.addError(os.path.join(sourceWorkspace, sourceName + " does not exist, exiting"))
                    return
                if not arcpy.Exists(os.path.join(gzSupport.workspace, targetName)):
                    gzSupport.addMessage(os.path.join(gzSupport.workspace, targetName) + " does not exist")
                    mode = "export"
                else:
                    mode = "import"
                arcpy.env.Workspace = gzSupport.workspace
                try:
                    if mode == "import":
                        retVal = gzSupport.importDataset(sourceWorkspace, sourceName, targetName, dataset)
                    elif mode == "export":
                        retVal = gzSupport.exportDataset(sourceWorkspace, sourceName, targetName, dataset)
                    if retVal == False:
                        success = False
                except:
                    gzSupport.showTraceback()
                    success = False
                    retVal = False
                gzSupport.logDatasetProcess(sourceName, targetName, retVal)
                arcpy.SetProgressorPosition()
    except:
        gzSupport.showTraceback()
        gzSupport.addError("A Fatal Error occurred")
        success = False
        gzSupport.logDatasetProcess("", "", False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(sourceWorkspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
        if success == False:
            gzSupport.addError("Errors occurred during process, look in log files for more information")
            if gzSupport.ignoreErrors == True:
                success = True
        gzSupport.closeLog()
        arcpy.SetParameter(SUCCESS, success)
def deleteExistingRows(datasets):
    # Truncate each dataset's target table in the gizinta workspace, when it
    # exists; otherwise just report that it is missing.
    for node in datasets:
        targetName = node.getAttributeNode("targetName").nodeValue
        targetTable = os.path.join(gzSupport.workspace, targetName)
        if not arcpy.Exists(targetTable):
            gzSupport.addMessage(targetTable + " does not exist")
            continue
        arcpy.DeleteRows_management(targetTable)
        gzSupport.addMessage("Rows deleted from: " + targetName)
def deleteExistingRows(datasets):
    # Remove all rows from every dataset's target table before a reload;
    # missing tables are reported but not treated as errors.
    for ds in datasets:
        tName = ds.getAttributeNode("targetName").nodeValue
        tPath = os.path.join(gzSupport.workspace, tName)
        if arcpy.Exists(tPath):
            arcpy.DeleteRows_management(tPath)
            gzSupport.addMessage("Rows deleted from: " + tName)
        else:
            gzSupport.addMessage(tPath + " does not exist")
def OpenBrowserURL(xmlStrSource, xmlStrTarget):
    # Post the source/target xml strings plus the gizinta project xml to the
    # gizinta.com mapping service and open the returned page in a browser.
    # NOTE(review): parses the module-level global xmlFileName for the
    # "gizinta" payload rather than anything derived from the parameters --
    # confirm that is intended.
    xmlDoc = xml.dom.minidom.parse(xmlFileName)
    xmlStrGizinta = xmlDoc.toxml()
    params = urllib.urlencode({'source': xmlStrSource, 'target': xmlStrTarget, 'gizinta': xmlStrGizinta})
    url = "http://www.gizinta.com/giztest/scripts/GizintaMapper.php"
    f = urllib.urlopen(url, params)
    # service responds with the generated file name to pass along as a query arg
    fileName = f.read()
    gzSupport.addMessage(fileName)
    url = 'http://www.gizinta.com/giztest/gizinta.html?target=' + fileName
    webbrowser.open(url, new=2)
def setFieldValues(table, fields):
    # from source xml file match old values to new values to prepare for append to target geodatabase
    # NOTE: Python 2 "except Exception, err" syntax -- this file targets the
    # ArcGIS 10.x Python 2 runtime.
    success = False
    try:
        updateCursor = arcpy.UpdateCursor(table)
        row = updateCursor.next()
    except Exception, ErrorDesc:
        gzSupport.addMessage("Unable to update the Dataset, Python error is: ")
        msg = str(getTraceback(Exception, ErrorDesc))  # this is the old style, could update
        gzSupport.addMessage(msg[msg.find("Error Info:"):])
        row = None
def setFieldValues(table, fields):
    # from source xml file match old values to new values to prepare for append to target geodatabase
    # NOTE: Python 2 "except Exception, err" syntax -- this file targets the
    # ArcGIS 10.x Python 2 runtime.
    success = False
    try:
        # position the update cursor on the first row of the table
        updateCursor = arcpy.UpdateCursor(table)
        row = updateCursor.next()
    except Exception, ErrorDesc:
        gzSupport.addMessage("Unable to update the Dataset, Python error is: ")
        # old-style traceback formatting; extract just the "Error Info:" tail
        msg = str(getTraceback(Exception, ErrorDesc))
        gzSupport.addMessage(msg[msg.find("Error Info:"):])
        row = None
def getCountNullBlank(table, field, extraExpr):
    # Return True when the field holds no Null values (extraExpr may extend
    # the where clause); any hits are reported as errors.
    where = "\"" + field + "\" is Null " + extraExpr
    elementType = arcpy.Describe(os.path.join(gzSupport.workspace, table)).dataElementType
    view = gzSupport.makeView(elementType, gzSupport.workspace, table, "temp_" + field, where, [])
    nullCount = int(arcpy.GetCount_management(view).getOutput(0))
    if nullCount == 0:
        gzSupport.addMessage("No Null field values found")
        return True
    gzSupport.addError(str(nullCount) + " Null field values found")
    return False
def getCountNullBlank(table, field, extraExpr):
    # Check the field for Null values via a temporary feature view; returns
    # False (and logs an error) when any are found.
    where = '"' + field + '" is Null ' + extraExpr
    view = gzSupport.makeFeatureView(gzSupport.workspace, table, "temp_" + field, where)
    hits = int(arcpy.GetCount_management(view).getOutput(0))
    if hits > 0:
        gzSupport.addError(str(hits) + " Null field values found")
        return False
    gzSupport.addMessage("No Null field values found")
    return True
def main(argv = None):
    # Run dataset-level and field-level QA rules for each configured dataset.
    # Relies on module globals: gzSupport, datasets, sourceFieldQA,
    # targetFieldQA, SUCCESS.
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    if len(datasets) > 0:
        progBar = len(datasets)
        arcpy.SetProgressor("step", "Running QA...", 0, progBar, 1)
    for dataset in datasets:
        arcpy.env.Workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
        table = os.path.join(gzSupport.workspace, name)
        fields = dataset.getElementsByTagName("Field")
        try:
            # run qa for dataset
            qaRulesDataset = dataset.getAttributeNode("qa").nodeValue
            gzSupport.addMessage("\nRunning QA (" + qaRulesDataset + ") for " + name)
            retVal = runDatasetChecks(dataset, table, qaRulesDataset)
            if retVal == False:
                success = False
            for field in fields:
                sourceQA = False
                targetQA = False
                # NOTE(review): fieldName is only bound when one of the QA
                # flags is enabled with CheckFields; the logDatasetProcess
                # call below would raise NameError otherwise -- confirm the
                # flags are always set in practice.
                if sourceFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    sourceQA = True
                    fieldName = gzSupport.getNodeValue(field, "SourceName")
                if targetFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    targetQA = True
                    fieldName = gzSupport.getNodeValue(field, "TargetName")
                retVal = runFieldCheck(dataset, table, field, sourceQA, targetQA)
                if retVal == False:
                    success = False
                gzSupport.logDatasetProcess(name, fieldName, retVal)
            arcpy.SetProgressorPosition()
        except:
            gzSupport.showTraceback()
            gzSupport.addError("Field Check Error")
            success = False
            gzSupport.logDatasetProcess(name, "", False)
        finally:
            arcpy.ResetProgressor()
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")
        if gzSupport.ignoreErrors == True:
            success = True
    arcpy.SetParameter(SUCCESS, success)
    gzSupport.closeLog()
    return
def OpenBrowserURL(xmlFileName):
    # Post the gizinta project xml and the first Dataset's name to the
    # mapping service, then open the returned page in the default browser.
    global url  # read as the service endpoint, then rebound to the page url
    xmlDoc = xml.dom.minidom.parse(xmlFileName)
    xmlStrGizinta = xmlDoc.toxml()
    dsNode = xmlDoc.getElementsByTagName("Dataset")[0]
    target = dsNode.getAttributeNode("name").nodeValue
    #gzSupport.addMessage("tg = " + target)
    theData = [('gizinta', xmlStrGizinta), ('target', target)]
    params = urllib.urlencode(theData)
    f = urllib.urlopen(url, params)
    # service responds with the generated file name to pass along as a query arg
    fileName = f.read()
    gzSupport.addMessage(fileName)
    url = 'http://www.gizinta.com/giztest/gizinta.html?target=' + fileName
    webbrowser.open(url, new=2)
def getCountNullBlank(table, field, extraExpr):
    # QA check: the named field should contain no Null values. extraExpr is
    # appended verbatim to the where clause. Returns True when clean.
    nullClause = '"' + field + '" is Null ' + extraExpr
    fullPath = os.path.join(gzSupport.workspace, table)
    dataType = arcpy.Describe(fullPath).dataElementType
    tempView = gzSupport.makeView(dataType, gzSupport.workspace, table, "temp_" + field, nullClause, [])
    found = int(arcpy.GetCount_management(tempView).getOutput(0))
    clean = found == 0
    if clean:
        gzSupport.addMessage("No Null field values found")
    else:
        gzSupport.addError(str(found) + " Null field values found")
    return clean
def OpenBrowserURL(xmlFileName):
    # Post the gizinta project xml, the target dataset name and the automap
    # flag to the mapping service; open the result page unless the response
    # contains a warning or error marker.
    global url  # read as the service endpoint, then rebound to the page url
    xmlDoc = xml.dom.minidom.parse(xmlFileName)
    xmlStrGizinta = xmlDoc.toxml()
    dsNode = xmlDoc.getElementsByTagName("Dataset")[0]
    target = dsNode.getAttributeNode("name").nodeValue
    # automap is a module-level setting -- presumably a true/false string; TODO confirm
    theData = [('gizinta', xmlStrGizinta), ('target', target), ('automap', automap)]
    params = urllib.urlencode(theData)
    f = urllib.urlopen(url, params)
    fileName = f.read()
    gzSupport.addMessage(fileName)
    if fileName.find("<Warning>") == -1 and fileName.find(">Error<") == -1:
        url = 'http://www.gizinta.com/fields/gizinta.html?target=' + fileName
        webbrowser.open(url, new=2)
    else:
        gzSupport.addMessage("An error occurred interacting with gizinta.com. Please check your network connection and error messages printed above")
def findDuplicates(dataset, table, field):
    # QA check for duplicated values of a field; each duplicate is logged to
    # the error table. Returns False when duplicates exist.
    valueInfo = gzSupport.getFieldValues("Unique", [field], [dataset])
    uniques = valueInfo[0]
    dupes = valueInfo[1]
    allValues = gzSupport.getFieldValues("All", [field], [dataset])[0]
    if len(dupes) == 0:
        gzSupport.addMessage("No Duplicates found")
        return True
    gzSupport.addMessage(str(len(allValues)) + " All : " + str(len(uniques)) + " Unique")
    gzSupport.addError(str(len(dupes)) + " Duplicates found, results located in " + gzSupport.errorTableName)
    for dupe in dupes:
        gzSupport.logProcessError(table, field, str(dupe), field, "Duplicate Value:" + str(dupe))
    return False
def main(argv = None):
    # Build xml documents for the source and target datasets and open the
    # gizinta field-mapping page with them.
    gzSupport.addMessage(gzSupport.getDBTime())
    success = True
    # evaluated left-to-right: source document first, then target
    OpenBrowserURL(getDocument(sourceDataset), getDocument(targetDataset))
    arcpy.SetParameter(gzSupport.successParameterNumber, success)
    arcpy.ResetProgressor()
    gzSupport.closeLog()
    return
def main(argv=None):
    # main function - list the source and target datasets, then append where there is a match on non-prefixed name
    success = True
    # ensure "name" is bound for the except-branch log call even when
    # listDatasets itself raises
    name = ''
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        sources = gzSupport.listDatasets(sourceGDB)
        sNames = sources[0]
        sFullNames = sources[1]
        targets = gzSupport.listDatasets(targetGDB)
        tNames = targets[0]
        tFullNames = targets[1]
        s = 0
        arcpy.SetProgressor("Step", "Appending rows...", 0, len(sFullNames), 1)
        for name in sNames:
            arcpy.SetProgressorPosition(s)
            # BUG FIX: label previously said "Deleting rows" during an append
            arcpy.SetProgressorLabel(" Appending rows in " + name + "...")
            # for each source name
            if debug:
                gzSupport.addMessage(name)
            try:
                # look for the matching name in target names
                t = tNames.index(name)
            except:
                # will get here if no match
                t = -1
            if t > -1:
                # append if there is a match
                if len(datasetNames) == 0 or name.upper() in datasetNames:
                    retVal = doAppend(sFullNames[s], tFullNames[t])
                    gzSupport.logDatasetProcess(name, "appendAlltoGDB", retVal)
                    if retVal == False:
                        success = False
                else:
                    gzSupport.addMessage("Skipping " + name)
            s = s + 1
    except:
        gzSupport.showTraceback()
        # BUG FIX: original referenced undefined name "pymsg" here, masking
        # the real error with a NameError
        gzSupport.addError("Unable to append datasets")
        success = False
        gzSupport.logDatasetProcess(name, "appendAlltoGDB", success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(targetGDB)
def main(argv = None):
    # main function - list the source and target datasets, then append where there is a match on non-prefixed name
    success = True
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        sources = gzSupport.listDatasets(sourceGDB)
        sNames = sources[0]
        sFullNames = sources[1]
        targets = gzSupport.listDatasets(targetGDB)
        tNames = targets[0]
        tFullNames = targets[1]
        s = 0
        arcpy.SetProgressor("Step","Appending rows...",0,len(sFullNames),1)
        for name in sNames:
            arcpy.SetProgressorPosition(s)
            # NOTE(review): label says "Deleting rows" although this loop
            # appends -- probably a copy/paste slip
            arcpy.SetProgressorLabel(" Deleting rows in " + name + "...")
            # for each source name
            if debug:
                gzSupport.addMessage(name)
            try:
                # look for the matching name in target names
                t = tNames.index(name)
            except:
                # will get here if no match
                t = -1
            if t > -1:
                # append if there is a match
                if len(datasetNames) == 0 or gzSupport.nameTrimmer(name) in datasetNames:
                    retVal = doAppend(sFullNames[s],tFullNames[t])
                    gzSupport.logDatasetProcess(name,"appendAlltoGDB",retVal)
                    if retVal == False:
                        success = False
                else:
                    gzSupport.addMessage("Skipping " + gzSupport.nameTrimmer(name))
            s = s + 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Unable to append datasets")
        success = False
        gzSupport.logDatasetProcess(name,"appendAlltoGDB",success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(targetGDB)
def main(argv = None):
    # main function - list the source and target datasets, then append where there is a match on non-prefixed name
    success = True
    name = ''
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        # use the explicit dataset list when provided, otherwise everything
        # in the source geodatabase
        if len(datasetNames) == 0:
            sources = gzSupport.listDatasets(sourceGDB)
            sNames = sources[0]
            sFullNames = sources[1]
            targets = gzSupport.listDatasets(targetGDB)
            tNames = targets[0]
            tFullNames = targets[1]
        else:
            sNames = datasetNames
        s = 0
        arcpy.SetProgressor("Step","Appending rows...",0,len(sNames),1)
        for name in sNames:
            arcpy.SetProgressorPosition(s)
            arcpy.SetProgressorLabel(" Appending rows in " + name + "...")
            # for each source name
            if debug:
                gzSupport.addMessage(name)
            target = os.path.join(targetGDB,name)
            if arcpy.Exists(target):
                # append if there is a match
                if len(datasetNames) == 0 or gzSupport.nameTrimmer(name) in datasetNames:
                    retVal = doAppend(os.path.join(sourceGDB,name),target)
                    gzSupport.logDatasetProcess("appendAlltoGDB",name,retVal)
                    if retVal == False:
                        success = False
                else:
                    gzSupport.addMessage("Skipping " + gzSupport.nameTrimmer(name))
            s = s + 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Unable to append datasets")
        success = False
        gzSupport.logDatasetProcess("appendAlltoGDB",name,success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(targetGDB)
def importLayer(cadPath, cadName, dataset):
    # Append features from one CAD drawing layer into its target feature
    # class; failures are logged. Returns True on success.
    result = False
    try:
        name = dataset.getAttributeNode("targetName").nodeValue
    except:
        # older project files use "name" instead of "targetName"
        name = dataset.getAttributeNode("name").nodeValue
    table = os.path.join(gzSupport.workspace, name)
    layerName = dataset.getAttributeNode("sourceName").nodeValue
    layer = os.path.join(cadPath, cadName, layerName)
    gzSupport.addMessage("Importing Layer " + layer)
    try:
        whereClause = gzSupport.getNodeValue(dataset, "WhereClause")
        xmlFields = dataset.getElementsByTagName("Field")
        gzSupport.addMessage("Where " + whereClause)
        if not arcpy.Exists(table):
            err = "Feature Class " + name + " does not exist"
            gzSupport.addError(err)
            gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name, name, err)
            return False
        # only build a filtered view when a where clause is configured
        if whereClause != '':
            view = gzSupport.makeFeatureView(gzSupport.workspace, layer, layerName + "_View", whereClause, xmlFields)
        else:
            view = layer
        count = arcpy.GetCount_management(view).getOutput(0)
        gzSupport.addMessage(str(count) + " source Features for " + name)
        if hasJoinTo(dataset) == True:
            res = joinToCsv(view, dataset, cadPath, cadName)
            result = res[0]
            view = res[1]
        else:
            view = view
            result = True
        # NOTE(review): count is the string returned by getOutput(0), so
        # "count > 0" compares str to int (always True on Python 2) --
        # confirm whether an int() conversion was intended.
        if result == True and count > 0:
            arcpy.Append_management([view], table, "NO_TEST", "", "")
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    except:
        err = "Failed to import layer " + name
        gzSupport.addError(err)
        gzSupport.showTraceback()
        gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name, layerName, err)
    gzSupport.cleanupGarbage()
    try:
        del view
    except:
        gzSupport.addMessage("")
    return result
def findDuplicates(dataset, table, field):
    # QA check for duplicated values of a field; logs each duplicate to the
    # error table and returns False when any exist.
    values = gzSupport.getFieldValues("Unique", [field], [dataset])
    uniqueVals = values[0]
    duplicates = values[1]
    if len(duplicates) == 0:
        gzSupport.addMessage("No Duplicates found")
        return True
    rowCount = int(arcpy.GetCount_management(table).getOutput(0))
    gzSupport.addMessage(str(rowCount) + " rows : " + str(len(uniqueVals)) + " Unique")
    gzSupport.addError(str(len(duplicates)) + " Duplicates found, results located in " + gzSupport.errorTableName)
    for value in duplicates:
        gzSupport.logProcessError(table, field, str(value), field, "Duplicate Value:" + str(value))
    return False
def calcValue(row, attrs, calcString):
    # calculate a value based on fields and or other expressions: tokens are
    # separated by "|" (or "!" when no "|" is present); tokens naming a field
    # in attrs are replaced by the row's value, then the text is eval'd.
    sep = "|" if calcString.find("|") > -1 else "!"
    expression = ""
    for token in calcString.split(sep):
        if token in attrs:
            expression += str(row.getValue(token))
        else:
            expression += token
    try:
        # on success the evaluated result is returned; on failure the raw
        # expression string is returned after logging the error
        return eval(expression)
    except:
        gzSupport.addMessage("Error evaluating:" + expression)
        gzSupport.showTraceback()
        gzSupport.addError("Error calculating field values:" + expression)
        return expression
def calcValue(row, attrs, calcString):
    # calculate a value based on fields and or other expressions; pieces of
    # calcString ("|"-separated, falling back to "!") that name a field in
    # attrs are substituted with the row's value before eval.
    if "|" in calcString:
        parts = calcString.split("|")
    else:
        parts = calcString.split("!")
    pieces = []
    for part in parts:
        pieces.append(str(row.getValue(part)) if part in attrs else part)
    result = "".join(pieces)
    try:
        result = eval(result)
    except:
        # eval failed: log and fall through, returning the raw string
        gzSupport.addMessage("Error evaluating:" + result)
        gzSupport.showTraceback()
        gzSupport.addError("Error calculating field values:" + result)
    return result
def importLayer(cadPath, cadName, dataset):
    # Append features from one CAD drawing layer into its target feature
    # class; failures are logged. Returns True on success.
    result = False
    try:
        name = dataset.getAttributeNode("targetName").nodeValue
    except:
        # older project files use "name" instead of "targetName"
        name = dataset.getAttributeNode("name").nodeValue
    table = os.path.join(gzSupport.workspace, name)
    layerName = dataset.getAttributeNode("sourceName").nodeValue
    layer = os.path.join(cadPath, cadName, layerName)
    gzSupport.addMessage("Importing Layer " + layer)
    try:
        whereClause = gzSupport.getNodeValue(dataset, "WhereClause")
        xmlFields = dataset.getElementsByTagName("Field")
        gzSupport.addMessage("Where " + whereClause)
        if not arcpy.Exists(table):
            err = "Feature Class " + name + " does not exist"
            gzSupport.addError(err)
            gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name, name, err)
            return False
        # only build a filtered view when a where clause is configured
        if whereClause != "":
            view = gzSupport.makeFeatureView(gzSupport.workspace, layer, layerName + "_View", whereClause, xmlFields)
        else:
            view = layer
        count = arcpy.GetCount_management(view).getOutput(0)
        gzSupport.addMessage(str(count) + " source Features for " + name)
        if hasJoinTo(dataset) == True:
            res = joinToCsv(view, dataset, cadPath, cadName)
            result = res[0]
            view = res[1]
        else:
            view = view
            result = True
        # NOTE(review): count is the string returned by getOutput(0), so
        # "count > 0" compares str to int (always True on Python 2) --
        # confirm whether an int() conversion was intended.
        if result == True and count > 0:
            arcpy.Append_management([view], table, "NO_TEST", "", "")
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    except:
        err = "Failed to import layer " + name
        gzSupport.addError(err)
        gzSupport.showTraceback()
        gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name, layerName, err)
    gzSupport.cleanupGarbage()
    try:
        del view
    except:
        gzSupport.addMessage("")
    return result
def OpenBrowserURL(xmlFileName):
    # Post the gizinta project xml, target dataset name and automap flag to
    # the field-mapping service (through the configured proxy) and open the
    # returned page unless the response contains a warning/error marker.
    global url  # read as the service endpoint, then rebound to the page url
    xmlDoc = xml.dom.minidom.parse(xmlFileName)
    xmlStrGizinta = xmlDoc.toxml()
    dsNode = xmlDoc.getElementsByTagName("Dataset")[0]
    target = dsNode.getAttributeNode("name").nodeValue
    # automap is a module-level setting -- presumably a true/false string; TODO confirm
    theData = [('gizinta', xmlStrGizinta), ('target', target), ('automap', automap)]
    params = urllib.urlencode(theData)
    setupProxy()
    f = urllib2.urlopen(url, params)
    fileName = f.read()
    gzSupport.addMessage(fileName)
    if fileName.find(">Warning<") == -1 and fileName.find(">Error<") == -1:
        url = 'http://www.gizinta.com/fields/gizinta.html?target=' + fileName
        webbrowser.open(url, new=2)
    else:
        gzSupport.addMessage("An error occurred interacting with gizinta.com. Please check your network connection and error messages printed above")
def runDatasetChecks(dataset, table, qaRulesDataset):
    # Run the comma-separated dataset-level QA rules (RepairGeometry,
    # CheckGeometry) against the table; returns False when errors remain.
    success = True
    for rule in qaRulesDataset.split(","):
        if rule == "RepairGeometry":
            gzSupport.addMessage("Running " + rule + " for " + table)
            attempts = 0
            errorCount = 1
            # repair up to 3 times or until no geometry errors remain
            while attempts < 3 and errorCount > 0:
                arcpy.RepairGeometry_management(table)
                errorCount = checkGeometry(table)
                attempts += 1
            if errorCount > 0:
                err = str(errorCount) + " Geometry Errors found after repairing " + str(attempts) + " times"
                gzSupport.addError(err)
                gzSupport.logProcessError(table, rule, rule, str(errorCount), err)
                success = False
            else:
                gzSupport.addMessage("Geometry successfully repaired")
        elif rule == "CheckGeometry":
            gzSupport.addMessage("Running " + rule + " for " + table)
            errorCount = checkGeometry(table)
            if errorCount > 0:
                success = False
                gzSupport.logProcessError(table, rule, rule, str(errorCount), "Geometry Errors Found")
    return success
def exportDataset(sourceLayer, targetName, dataset):
    """Export the features of a source .lyr layer into a new feature class
    named targetName in the gizinta workspace, applying the dataset's
    optional WhereClause.

    Returns True on success, False on failure (failures are logged).
    """
    result = True
    targetTable = os.path.join(gzSupport.workspace, targetName)
    gzSupport.addMessage("Exporting Layer from " + sourceLayer)
    whereClause = ""
    try:
        try:
            whereClause = gzSupport.getNodeValue(dataset, "WhereClause")
        except:
            # dataset has no WhereClause node - export everything
            whereClause = ''
        gzSupport.addMessage("Where '" + whereClause + "'")
        # layer name = file name without the path or the .lyr extension
        sourceName = sourceLayer[sourceLayer.rfind(os.sep) + 1:sourceLayer.lower().rfind(".lyr")]
        viewName = sourceName + "_View"
        # xmlDoc is a module-level parsed project document - TODO confirm
        xmlFields = xmlDoc.getElementsByTagName("Field")
        view = gzSupport.makeFeatureViewForLayer(gzSupport.workspace, sourceLayer, viewName, whereClause, xmlFields)
        count = arcpy.GetCount_management(view).getOutput(0)
        gzSupport.addMessage(str(count) + " source rows")
        arcpy.FeatureClassToFeatureClass_conversion(view, gzSupport.workspace, targetName)
    except:
        err = "Failed to create new dataset " + targetName
        gzSupport.showTraceback()
        gzSupport.addError(err)
        gzSupport.logProcessError(sourceLayer, gzSupport.sourceIDField, sourceLayer, targetName, err)
        result = False
    return result
def exportDataset(sourceLayer, targetName, dataset):
    # Copy the source layer's features (optionally filtered by the dataset's
    # WhereClause) into a new feature class in the gizinta workspace.
    # Returns True on success, False on failure (which is logged).
    succeeded = True
    targetTable = os.path.join(gzSupport.workspace, targetName)
    gzSupport.addMessage("Exporting Layer from " + sourceLayer)
    whereClause = ""
    try:
        try:
            whereClause = gzSupport.getNodeValue(dataset, "WhereClause")
        except:
            whereClause = ""
        gzSupport.addMessage("Where '" + whereClause + "'")
        # strip the path and the .lyr extension to build the view name
        start = sourceLayer.rfind(os.sep) + 1
        end = sourceLayer.lower().rfind(".lyr")
        viewName = sourceLayer[start:end] + "_View"
        fieldNodes = xmlDoc.getElementsByTagName("Field")
        view = gzSupport.makeFeatureViewForLayer(gzSupport.workspace, sourceLayer, viewName, whereClause, fieldNodes)
        rowCount = arcpy.GetCount_management(view).getOutput(0)
        gzSupport.addMessage(str(rowCount) + " source rows")
        arcpy.FeatureClassToFeatureClass_conversion(view, gzSupport.workspace, targetName)
    except:
        err = "Failed to create new dataset " + targetName
        gzSupport.showTraceback()
        gzSupport.addError(err)
        gzSupport.logProcessError(sourceLayer, gzSupport.sourceIDField, sourceLayer, targetName, err)
        succeeded = False
    return succeeded
def doInlineAppend(source, target):
    # Empty the target table, then append all rows from source into it.
    # Returns False when the target does not exist.
    if not arcpy.Exists(target):
        gzSupport.addMessage("Target: " + target + " does not exist")
        return False
    gzSupport.addMessage("Deleting rows from " + target)
    arcpy.DeleteRows_management(target)
    gzSupport.addMessage("Appending " + source + " TO " + target)
    arcpy.Append_management(source, target, "NO_TEST")
    if debug:
        gzSupport.addMessage("completed")
    return True
def doInlineAppend(source, target):
    # perform the append from a source table to a target table
    # Returns True when the target exists and the rows were appended,
    # False when the target feature class is missing.
    success = False
    if arcpy.Exists(target):
        # replace the target's contents rather than accumulating rows
        gzSupport.addMessage("Deleting rows from " + target)
        arcpy.DeleteRows_management(target)
        gzSupport.addMessage("Appending " + source + " TO " + target)
        arcpy.Append_management(source, target, "NO_TEST")
        success = True
        if debug:
            gzSupport.addMessage("completed")
    else:
        gzSupport.addMessage("Target: " + target + " does not exist")
        success = False
    return success
def doTruncate(target):
    # Remove all rows from the target table; best-effort, so inputs that
    # cannot be deleted are reported rather than raising.
    deleted = False
    try:
        if not arcpy.Exists(target):
            gzSupport.addMessage("Target: " + target + " does not exist")
        else:
            gzSupport.addMessage("Deleting rows in " + target)
            arcpy.DeleteRows_management(target)
            deleted = True
            if debug:
                gzSupport.addMessage("Deleted")
        gzSupport.cleanupGarbage()
    except:
        # assume this is a view or something that can't be deleted if only some things are not deleted.
        gzSupport.addMessage("Unable to delete rows for: " + target)
    return deleted
def doTruncate(target):
    # Delete every row in the target table.
    # Returns False when the target does not exist.
    if not arcpy.Exists(target):
        gzSupport.addMessage("Target: " + target + " does not exist")
        return False
    gzSupport.addMessage("Deleting rows in " + target)
    arcpy.DeleteRows_management(target)
    if debug:
        gzSupport.addMessage("Deleted")
    return True
def checkGeometry(table):
    # Run CheckGeometry on the table and return the number of errors found.
    # The <table>_Check error table is kept only when errors exist; a failed
    # check is reported and counted as 0 errors.
    errorCount = 0
    resultTable = table + "_Check"
    try:
        if arcpy.Exists(resultTable):
            arcpy.Delete_management(resultTable)
            gzSupport.addMessage("Deleted existing " + resultTable)
        arcpy.CheckGeometry_management(table, resultTable)
        errorCount = int(arcpy.GetCount_management(resultTable).getOutput(0))
        if errorCount:
            gzSupport.addMessage(str(errorCount) + " Errors located in " + resultTable)
        else:
            gzSupport.addMessage("No Geometry Errors found")
            arcpy.Delete_management(resultTable)
    except:
        gzSupport.showTraceback()
        gzSupport.addMessage("Unable to perform geometry check, see error listed above")
        errorCount = 0
    return errorCount
def doAppend(source, target):
    # Append rows from source into target without truncating first.
    # Returns False when the target does not exist.
    appended = False
    if not arcpy.Exists(target):
        gzSupport.addMessage("Target: " + target + " does not exist")
    else:
        gzSupport.addMessage("Appending " + source + " |TO| " + target)
        arcpy.Append_management(source, target, "NO_TEST")
        appended = True
        if debug:
            gzSupport.addMessage("completed")
    gzSupport.cleanupGarbage()
    return appended
def checkGeometry(table):
    """Check the table's geometry and return the error count.

    Creates a <table>_Check error table via CheckGeometry; the table is
    removed again when no errors are found. Returns 0 when the check
    itself cannot be run (the traceback is reported first).
    """
    try:
        errTable = table + "_Check"
        if arcpy.Exists(errTable):
            # remove results from a previous run so the count is fresh
            arcpy.Delete_management(errTable)
            gzSupport.addMessage("Deleted existing " + errTable)
        arcpy.CheckGeometry_management(table, errTable)
        count = int(arcpy.GetCount_management(errTable).getOutput(0))
        if count == 0:
            gzSupport.addMessage("No Geometry Errors found")
            arcpy.Delete_management(errTable)
        else:
            gzSupport.addMessage(str(count) + " Errors located in " + errTable)
    except:
        gzSupport.showTraceback()
        gzSupport.addMessage(
            "Unable to perform geometry check, see error listed above")
        count = 0
    return count
def checkGeometry(table):
    # Check geometry and return the error count; the <table>_Check error
    # table persists only when errors were found.
    errTableName = table + "_Check"
    if arcpy.Exists(errTableName):
        arcpy.Delete_management(errTableName)
        gzSupport.addMessage("Deleted existing " + errTableName)
    arcpy.CheckGeometry_management(table, errTableName)
    numErrors = int(arcpy.GetCount_management(errTableName).getOutput(0))
    if numErrors != 0:
        gzSupport.addMessage(str(numErrors) + " Errors located in " + errTableName)
    else:
        gzSupport.addMessage("No Geometry Errors found")
        arcpy.Delete_management(errTableName)
    return numErrors
def writeDocument(sourceDataset, targetDataset, xmlFileName):
    """Generate a gizinta project xml document mapping sourceDataset's
    fields to targetDataset's fields and write it, pretty-printed, to
    xmlFileName.

    Returns the pretty xml string, or "" if an error occurred while
    building the field list or writing the file.
    """
    desc = arcpy.Describe(sourceDataset)
    descT = arcpy.Describe(targetDataset)
    gzSupport.addMessage(sourceDataset)
    xmlDoc = Document()
    root = xmlDoc.createElement('Gizinta')
    xmlDoc.appendChild(root)
    root.setAttribute("logTableName", 'gzLog')
    root.setAttribute("errorTableName", 'gzError')
    root.setAttribute("version", '2013.1')
    root.setAttribute("xmlns:gizinta", 'http://gizinta.com')
    # Extract section describes the source dataset and its WhereClause
    extract = xmlDoc.createElement("Extract")
    root.appendChild(extract)
    dataElementName = getExtractElementName(desc, sourceDataset)
    source = xmlDoc.createElement(dataElementName)
    sourceName = getName(desc, sourceDataset)
    targetName = getName(descT, targetDataset)
    setDefaultProperties(source, dataElementName, sourceDataset, sourceName,
                         targetName)
    where = xmlDoc.createElement("WhereClause")
    source.appendChild(where)
    extract.appendChild(source)
    # Transform section holds one Dataset with a Field element per target field
    transform = xmlDoc.createElement("Transform")
    root.appendChild(transform)
    dataset = xmlDoc.createElement("Dataset")
    transform.appendChild(dataset)
    dataset.setAttribute("name", targetName)
    dataset.setAttribute("qa", "CheckFields,CheckGeometry")
    dataset.setAttribute("sourceIDField", "")
    dataset.setAttribute("sourceNameField", "")
    fields = getFields(descT, targetDataset)
    sourceFields = getFields(desc, sourceDataset)
    # unqualified source field names (strip any owner/table prefix)
    sourceNames = [
        field.name[field.name.rfind(".") + 1:] for field in sourceFields
    ]
    i = 0
    try:
        for field in fields:
            fNode = xmlDoc.createElement("Field")
            dataset.appendChild(fNode)
            fieldName = field.name[field.name.rfind(".") + 1:]
            if fieldName in sourceNames:
                addFieldElement(xmlDoc, fNode, "SourceName", fieldName)
            else:
                # flag target fields with no matching source field for review
                addFieldElement(xmlDoc, fNode, "SourceName",
                                "*" + fieldName + "*")
            addFieldElement(xmlDoc, fNode, "TargetName", fieldName)
            addFieldElement(xmlDoc, fNode, "Method", "Copy")
            addFieldElement(xmlDoc, fNode, "FieldType", field.type)
            addFieldElement(xmlDoc, fNode, "FieldLength", str(field.length))
            i += 1
        setSourceFields(xmlDoc, dataset, sourceNames)
        # Should add a template section for value maps, maybe write domains...
        xmlStr = xmlDoc.toprettyxml()
        # re-serialize and collapse the text nodes that toprettyxml places
        # on their own indented lines
        uglyXml = xmlDoc.toprettyxml(indent=' ', encoding="utf-8")
        text_re = re.compile('>\n\s+([^<>\s].*?)\n\s+</', re.DOTALL)
        prettyXml = text_re.sub('>\g<1></', uglyXml)
        fHandle = open(xmlFileName, 'w')
        fHandle.write(prettyXml)
        fHandle.close()
    except:
        gzSupport.showTraceback()
        xmlStr = ""
    return xmlStr
def main(argv = None):
    # main function - list the source and target datasets, then create a
    # gizinta project xml file for every name that appears in both.
    arcpy.AddToolbox(os.path.join(os.path.dirname(sys.path[0]), "Gizinta.tbx"))
    success = True
    try:
        gzSupport.addMessage("Getting list of datasets for Target " + targetGDB)
        targets = gzSupport.listDatasets(targetGDB)
        tNames = targets[0]
        tFullNames = targets[1]
        gzSupport.addMessage("Getting list of datasets for Source " + sourceGDB)
        sources = gzSupport.listDatasets(sourceGDB)
        sNames = sources[0]
        sFullNames = sources[1]
        t = 0
        # lower-case "step" is the documented progressor type (matches the
        # other tools in this file)
        arcpy.SetProgressor("step", "Creating Files...", 0, len(tNames), 1)
        for name in tNames:
            arcpy.SetProgressorPosition(t)
            arcpy.SetProgressorLabel("Creating file for " + name + "...")
            if debug:
                gzSupport.addMessage(name)
            try:
                # look for the matching name in the source names
                s = sNames.index(name)
            except:
                # will get here if no match
                s = -1
            if s > -1:
                # create file if there is a match
                fileName = outputFolder + os.sep + prefixStr + name.title() + ".xml"
                if os.path.exists(fileName):
                    os.remove(fileName)
                try:
                    arcpy.gzCreateProject_gizinta(sFullNames[s], tFullNames[t], fileName)
                    retVal = True
                    gzSupport.addMessage("Created " + fileName)
                except:
                    retVal = False
                if retVal == False:
                    gzSupport.addMessage("Failed to create file for " + name)
                    gzSupport.showTraceback()
                    success = False
            else:
                gzSupport.addMessage("Skipping " + name)
            t = t + 1
    except:
        gzSupport.showTraceback()
        # BUG FIX: the original called arcpy.AddError(pymsg) but 'pymsg' was
        # never defined, so the handler itself raised a NameError. Report a
        # plain error message instead; the traceback was already shown.
        arcpy.AddError("A fatal error occurred, see messages above")
        success = False
    finally:
        arcpy.ResetProgressor()
        arcpy.SetParameter(gzSupport.successParameterNumber, success)
        arcpy.env.workspace = targetGDB
        arcpy.RefreshCatalog(outputFolder)
        gzSupport.closeLog()