def main(argv = None):
    # main function - list the datasets and delete rows
    success = True
    name = ''
    try:
        names = gzSupport.listDatasets(sourceGDB)
        tNames = names[0]
        tFullNames = names[1]
        arcpy.SetProgressor("Step","Deleting rows...",0,len(tFullNames),1)
        i = 0
        for name in tFullNames:
            arcpy.SetProgressorPosition(i)
            arcpy.SetProgressorLabel(" Deleting rows in " + name + "...")
            # for each full name
            if len(datasetNames) == 0 or tNames[i].upper() in datasetNames:
                retVal = doTruncate(name)
                gzSupport.logDatasetProcess(name,"deleteRowsGDB",retVal)
                if retVal == False:
                    success = False
            else:
                gzSupport.addMessage("Skipping " + tNames[i])
            i += 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Failed to delete rows")
        success = False
        gzSupport.logDatasetProcess(name,"deleteRowsGDB",success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(sourceGDB)
def main(argv = None):
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    tables = gzSupport.listDatasets(gzSupport.workspace)
    tNames = tables[0]
    tFullNames = tables[1]
    if len(datasets) > 0:
        progBar = len(datasets)
        arcpy.SetProgressor("step", "Running QA...", 0, progBar, 1)
    for dataset in datasets:
        arcpy.env.workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
        table = gzSupport.getFullName(name, tNames, tFullNames)
        #table = os.path.join(gzSupport.workspace,name)
        fields = dataset.getElementsByTagName("Field")
        try:
            # run qa for dataset
            qaRulesDataset = dataset.getAttributeNode("qa").nodeValue
            gzSupport.addMessage("\nRunning QA (" + qaRulesDataset + ") for " + name)
            retVal = runDatasetChecks(dataset, table, qaRulesDataset)
            if retVal == False:
                success = False
            for field in fields:
                sourceQA = False
                targetQA = False
                fieldName = gzSupport.getNodeValue(field, "TargetName")
                if sourceFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    sourceQA = True
                    fieldName = gzSupport.getNodeValue(field, "SourceName")
                if targetFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    targetQA = True
                    fieldName = gzSupport.getNodeValue(field, "TargetName")
                retVal = runFieldCheck(dataset, table, field, sourceQA, targetQA)
                if retVal == False:
                    success = False
                try:
                    gzSupport.logDatasetProcess(name, fieldName, retVal)
                except:
                    gzSupport.addMessage("Process not logged for field")
            arcpy.SetProgressorPosition()
        except:
            gzSupport.showTraceback()
            gzSupport.addError("Field Check Error")
            success = False
            gzSupport.logDatasetProcess("sourceTargetQA", name, False)
        finally:
            arcpy.ResetProgressor()
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log file tools\\log\\sourceTargetQA.log for more information")
    if gzSupport.ignoreErrors == True:
        success = True
    arcpy.SetParameter(SUCCESS, success)
    gzSupport.closeLog()
    return
def main(argv = None):
    # main function - list the datasets and delete rows
    success = True
    name = ''
    gzSupport.workspace = sourceGDB
    try:
        if len(datasetNames) == 0:
            names = gzSupport.listDatasets(sourceGDB)
            tNames = names[0]
        else:
            tNames = datasetNames
        arcpy.SetProgressor("Step","Deleting rows...",0,len(tNames),1)
        i = 0
        for name in tNames:
            arcpy.SetProgressorPosition(i)
            arcpy.SetProgressorLabel(" Deleting rows in " + name + "...")
            # for each full name
            if len(datasetNames) == 0 or gzSupport.nameTrimmer(name.upper()) in datasetNames:
                retVal = doTruncate(os.path.join(sourceGDB,name))
                gzSupport.logDatasetProcess("deleteRowsGDB",name,retVal)
                if retVal == False:
                    success = False
            else:
                gzSupport.addMessage("Skipping " + gzSupport.nameTrimmer(name))
            i = i + 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Failed to delete rows")
        success = False
        gzSupport.logDatasetProcess("deleteRowsGDB",name,success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(sourceGDB)
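# doTruncate is called by both delete-rows mains above but is not defined in
# this section. A minimal sketch, assuming it simply truncates one table and
# reports success; only standard arcpy calls are used:
def doTruncate(table):
    retVal = True
    try:
        arcpy.TruncateTable_management(table)
        gzSupport.addMessage("Deleted rows in " + table)
    except:
        gzSupport.showTraceback()
        retVal = False
    return retVal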
def checkValueMaps(dataset,table,field,fieldName,mapName): global valueMaps method = gzSupport.getNodeValue(field,"Method") success = True if method == "ValueMap": fieldMapName = gzSupport.getNodeValue(field,"ValueMapName") otherwise = gzSupport.getNodeValue(field,"ValueMapOtherwise") found = False for map in valueMaps: mapNodeName = map.getAttributeNode("name").nodeValue if mapNodeName == fieldMapName and not found: found = True # it is possible for the same value map to be present in multiple gizinta project files, just use the first one. mapValues = gzSupport.getNodeValue(map,mapName).split(",") if otherwise != None and otherwise != '' and otherwise not in mapValues and not otherwise.count(" ") > 2: mapValues.append(otherwise) values = gzSupport.getFieldValues("Unique",[fieldName],[dataset]) uniqueValues = values[0] #delta = len(uniqueValues[0]) - len(mapValues) mismatch = [] for uVal in uniqueValues: if uVal not in mapValues: mismatch.append(uVal) if len(mismatch) > 0 and not otherwise.count(" ") > 2: gzSupport.addError(str(len(mismatch)) + " mismatches for " + fieldName + ", results located in " + gzSupport.errorTableName) for uVal in mismatch: gzSupport.addError("'" + str(uVal) + "' not found in value map " + str(fieldMapName)) gzSupport.logProcessError(table,gzSupport.sourceIDField,"",fieldName,"Mismatched Value Map:" + str(uVal)) success = False elif len(mismatch) == 0: gzSupport.addMessage("No mismatches found for ValueMaps") return success
def runDatasetChecks(dataset,table,qaRulesDataset):
    qaRules = qaRulesDataset.split(",")
    success = True
    for rule in qaRules:
        if rule == "RepairGeometry":
            i = 0
            count = 1
            gzSupport.addMessage("Running " + rule + " for " + table)
            while i < 3 and count > 0:
                arcpy.RepairGeometry_management(table)
                count = checkGeometry(table)
                i += 1
            if count > 0:
                err = str(count) + " Geometry Errors found after repairing " + str(i) + " times"
                gzSupport.addError(err)
                gzSupport.logProcessError(table,rule,rule,str(count),err)
                success = False
            else:
                gzSupport.addMessage("Geometry successfully repaired")
        elif rule == "CheckGeometry":
            gzSupport.addMessage("Running " + rule + " for " + table)
            count = checkGeometry(table)
            if count > 0:
                success = False
                gzSupport.logProcessError(table,rule,rule,str(count),"Geometry Errors Found")
    return success
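# checkGeometry is referenced above but defined elsewhere. A plausible sketch,
# assuming it runs arcpy's CheckGeometry tool into a scratch table and returns
# the number of errors found; the scratch table name is hypothetical:
def checkGeometry(table):
    errorTable = os.path.join(gzSupport.workspace, "checkGeometryResult")
    if arcpy.Exists(errorTable):
        arcpy.Delete_management(errorTable)
    arcpy.CheckGeometry_management(table, errorTable)
    count = int(arcpy.GetCount_management(errorTable).getOutput(0))
    return count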
def checkValueMaps(dataset,table,field,fieldName,mapName): global valueMaps method = gzSupport.getNodeValue(field,"Method") success = True if method == "ValueMap": fieldMapName = gzSupport.getNodeValue(field,"ValueMapName") otherwise = gzSupport.getNodeValue(field,"ValueMapOtherwise") for map in valueMaps: mapNodeName = map.getAttributeNode("name").nodeValue if mapNodeName == fieldMapName: mapValues = gzSupport.getNodeValue(map,mapName).split(",") if otherwise != None and otherwise != '' and otherwise not in mapValues and not otherwise.count(" ") > 2: mapValues.append(otherwise) values = gzSupport.getFieldValues("Unique",[fieldName],[dataset]) uniqueValues = values[0] #delta = len(uniqueValues[0]) - len(mapValues) mismatch = [] for uVal in uniqueValues: if uVal not in mapValues: mismatch.append(uVal) if len(mismatch) > 0 and not otherwise.count(" ") > 2: gzSupport.addError(str(len(mismatch)) + " mismatches for " + fieldName + ", results located in " + gzSupport.errorTableName) for uVal in mismatch: gzSupport.addError("'" + str(uVal) + "' not found in value map " + str(fieldMapName)) gzSupport.logProcessError(table,gzSupport.sourceIDField,"",fieldName,"Mismatched Value Map:" + str(uVal)) success = False elif len(mismatch) == 0: gzSupport.addMessage("No mismatches found for ValueMaps") return success
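# For orientation, the ValueMap XML that both checkValueMaps variants walk
# looks roughly like the sketch below. The tag names are hypothetical,
# inferred from the getAttributeNode("name") and getNodeValue(map, mapName)
# calls above; mapName selects which comma-separated child element to read:
#
#   <ValueMaps>
#     <ValueMap name="SymbolToType">
#       <SourceValues>1,2,3</SourceValues>
#       <TargetValues>Local,Arterial,Highway</TargetValues>
#     </ValueMap>
#   </ValueMaps>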
def exportDataset(sourceLayer, targetName, dataset): result = True targetTable = os.path.join(gzSupport.workspace, targetName) gzSupport.addMessage("Exporting Layer from " + sourceLayer) whereClause = "" try: try: whereClause = gzSupport.getNodeValue(dataset, "WhereClause") except: whereClause = '' gzSupport.addMessage("Where '" + whereClause + "'") sourceName = sourceLayer[sourceLayer.rfind(os.sep) + 1:sourceLayer.lower().rfind(".lyr")] viewName = sourceName + "_View" xmlFields = xmlDoc.getElementsByTagName("Field") view = gzSupport.makeFeatureViewForLayer(gzSupport.workspace, sourceLayer, viewName, whereClause, xmlFields) count = arcpy.GetCount_management(view).getOutput(0) gzSupport.addMessage(str(count) + " source rows") arcpy.FeatureClassToFeatureClass_conversion(view, gzSupport.workspace, targetName) except: err = "Failed to create new dataset " + targetName gzSupport.showTraceback() gzSupport.addError(err) gzSupport.logProcessError(sourceLayer, gzSupport.sourceIDField, sourceLayer, targetName, err) result = False return result
def importLayer(cadPath, cadName, dataset): result = False try: name = dataset.getAttributeNode("targetName").nodeValue except: name = dataset.getAttributeNode("name").nodeValue table = os.path.join(gzSupport.workspace, name) layerName = dataset.getAttributeNode("sourceName").nodeValue layer = os.path.join(cadPath, cadName, layerName) gzSupport.addMessage("Importing Layer " + layer) try: whereClause = gzSupport.getNodeValue(dataset, "WhereClause") xmlFields = dataset.getElementsByTagName("Field") gzSupport.addMessage("Where " + whereClause) if not arcpy.Exists(table): err = "Feature Class " + name + " does not exist" gzSupport.addError(err) gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name, name, err) return False if whereClause != '': view = gzSupport.makeFeatureView(gzSupport.workspace, layer, layerName + "_View", whereClause, xmlFields) else: view = layer count = arcpy.GetCount_management(view).getOutput(0) gzSupport.addMessage(str(count) + " source Features for " + name) if hasJoinTo(dataset) == True: res = joinToCsv(view, dataset, cadPath, cadName) result = res[0] view = res[1] else: view = view result = True if result == True and count > 0: arcpy.Append_management([view], table, "NO_TEST", "", "") arcpy.ClearWorkspaceCache_management(gzSupport.workspace) except: err = "Failed to import layer " + name gzSupport.addError(err) gzSupport.showTraceback() gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name, layerName, err) gzSupport.cleanupGarbage() try: del view except: gzSupport.addMessage("") return result
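# hasJoinTo is used by importLayer and joinToCsv but not shown in this
# section. A minimal sketch, assuming a dataset participates in a csv join
# exactly when its element carries a non-empty "joinTo" attribute:
def hasJoinTo(dataset):
    try:
        joinTo = dataset.getAttributeNode("joinTo").nodeValue
        return joinTo != None and joinTo != ""
    except:
        return False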
def getCountNullBlank(table,field,extraExpr):
    whereClause = "\"" + field + "\" is Null " + extraExpr
    success = True
    desc = arcpy.Describe(os.path.join(gzSupport.workspace,table))
    viewName = gzSupport.makeView(desc.dataElementType,gzSupport.workspace,table,"temp_"+field,whereClause,[])
    count = int(arcpy.GetCount_management(viewName).getOutput(0))
    if count > 0:
        gzSupport.addError(str(count) + " Null field values found")
        success = False
    else:
        gzSupport.addMessage("No Null field values found")
    return success
def getCountNullBlank(table,field,extraExpr):
    whereClause = "\"" + field + "\" is Null " + extraExpr
    success = True
    viewName = gzSupport.makeFeatureView(gzSupport.workspace,table,"temp_"+field,whereClause)
    count = int(arcpy.GetCount_management(viewName).getOutput(0))
    if count > 0:
        gzSupport.addError(str(count) + " Null field values found")
        success = False
    else:
        gzSupport.addMessage("No Null field values found")
    return success
def main(argv = None):
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    if len(datasets) > 0:
        progBar = len(datasets)
        arcpy.SetProgressor("step", "Running QA...", 0, progBar, 1)
    for dataset in datasets:
        arcpy.env.workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
        table = os.path.join(gzSupport.workspace,name)
        fields = dataset.getElementsByTagName("Field")
        try:
            # run qa for dataset
            qaRulesDataset = dataset.getAttributeNode("qa").nodeValue
            gzSupport.addMessage("\nRunning QA (" + qaRulesDataset + ") for " + name)
            retVal = runDatasetChecks(dataset,table,qaRulesDataset)
            if retVal == False:
                success = False
            for field in fields:
                sourceQA = False
                targetQA = False
                fieldName = '' # initialized so the log call below cannot hit an unbound name when neither QA flag is set
                if sourceFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    sourceQA = True
                    fieldName = gzSupport.getNodeValue(field,"SourceName")
                if targetFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    targetQA = True
                    fieldName = gzSupport.getNodeValue(field,"TargetName")
                retVal = runFieldCheck(dataset,table,field,sourceQA,targetQA)
                if retVal == False:
                    success = False
                gzSupport.logDatasetProcess(name,fieldName,retVal)
            arcpy.SetProgressorPosition()
        except:
            gzSupport.showTraceback()
            gzSupport.addError("Field Check Error")
            success = False
            gzSupport.logDatasetProcess(name,"",False)
        finally:
            arcpy.ResetProgressor()
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")
    if gzSupport.ignoreErrors == True:
        success = True
    arcpy.SetParameter(SUCCESS, success)
    gzSupport.closeLog()
    return
def main(argv=None):
    # main function - list the source and target datasets, then append where there is a match on non-prefixed name
    success = True
    name = ''
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        sources = gzSupport.listDatasets(sourceGDB)
        sNames = sources[0]
        sFullNames = sources[1]
        targets = gzSupport.listDatasets(targetGDB)
        tNames = targets[0]
        tFullNames = targets[1]
        s = 0
        arcpy.SetProgressor("Step", "Appending rows...", 0, len(sFullNames), 1)
        for name in sNames:
            arcpy.SetProgressorPosition(s)
            arcpy.SetProgressorLabel(" Appending rows in " + name + "...")
            # for each source name
            if debug:
                gzSupport.addMessage(name)
            try:
                # look for the matching name in target names
                t = tNames.index(name)
            except:
                # will get here if no match
                t = -1
            if t > -1:
                # append if there is a match
                if len(datasetNames) == 0 or name.upper() in datasetNames:
                    retVal = doAppend(sFullNames[s], tFullNames[t])
                    gzSupport.logDatasetProcess(name, "appendAlltoGDB", retVal)
                    if retVal == False:
                        success = False
                else:
                    gzSupport.addMessage("Skipping " + name)
            s = s + 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Unable to append datasets")
        success = False
        gzSupport.logDatasetProcess(name, "appendAlltoGDB", success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(targetGDB)
def main(argv = None):
    # main function - list the source and target datasets, then append where there is a match on non-prefixed name
    success = True
    name = ''
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        sources = gzSupport.listDatasets(sourceGDB)
        sNames = sources[0]
        sFullNames = sources[1]
        targets = gzSupport.listDatasets(targetGDB)
        tNames = targets[0]
        tFullNames = targets[1]
        s = 0
        arcpy.SetProgressor("Step","Appending rows...",0,len(sFullNames),1)
        for name in sNames:
            arcpy.SetProgressorPosition(s)
            arcpy.SetProgressorLabel(" Appending rows in " + name + "...")
            # for each source name
            if debug:
                gzSupport.addMessage(name)
            try:
                # look for the matching name in target names
                t = tNames.index(name)
            except:
                # will get here if no match
                t = -1
            if t > -1:
                # append if there is a match
                if len(datasetNames) == 0 or gzSupport.nameTrimmer(name) in datasetNames:
                    retVal = doAppend(sFullNames[s],tFullNames[t])
                    gzSupport.logDatasetProcess(name,"appendAlltoGDB",retVal)
                    if retVal == False:
                        success = False
                else:
                    gzSupport.addMessage("Skipping " + gzSupport.nameTrimmer(name))
            s = s + 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Unable to append datasets")
        success = False
        gzSupport.logDatasetProcess(name,"appendAlltoGDB",success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(targetGDB)
def main(argv = None):
    # main function - list the source and target datasets, then append where there is a match on non-prefixed name
    success = True
    name = ''
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        if len(datasetNames) == 0:
            sources = gzSupport.listDatasets(sourceGDB)
            sNames = sources[0]
            sFullNames = sources[1]
            targets = gzSupport.listDatasets(targetGDB)
            tNames = targets[0]
            tFullNames = targets[1]
        else:
            sNames = datasetNames
        s = 0
        arcpy.SetProgressor("Step","Appending rows...",0,len(sNames),1)
        for name in sNames:
            arcpy.SetProgressorPosition(s)
            arcpy.SetProgressorLabel(" Appending rows in " + name + "...")
            # for each source name
            if debug:
                gzSupport.addMessage(name)
            target = os.path.join(targetGDB,name)
            if arcpy.Exists(target):
                # append if there is a match
                if len(datasetNames) == 0 or gzSupport.nameTrimmer(name) in datasetNames:
                    retVal = doAppend(os.path.join(sourceGDB,name),target)
                    gzSupport.logDatasetProcess("appendAlltoGDB",name,retVal)
                    if retVal == False:
                        success = False
                else:
                    gzSupport.addMessage("Skipping " + gzSupport.nameTrimmer(name))
            s = s + 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Unable to append datasets")
        success = False
        gzSupport.logDatasetProcess("appendAlltoGDB",name,success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(targetGDB)
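# doAppend is called by the append mains above but is not defined in this
# section. A minimal sketch, assuming it appends all rows from source to
# target with no schema test, mirroring the Append call used in importLayer:
def doAppend(source, target):
    retVal = True
    try:
        arcpy.Append_management([source], target, "NO_TEST", "", "")
        gzSupport.addMessage("Appended rows from " + source + " to " + target)
    except:
        gzSupport.showTraceback()
        retVal = False
    return retVal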
def findDuplicates(dataset,table,field): success = True uValues = gzSupport.getFieldValues("Unique",[field],[dataset]) uniqueValues = uValues[0] diffValues = uValues[1] fieldValues = gzSupport.getFieldValues("All",[field],[dataset])[0] delta = len(diffValues) if delta > 0: gzSupport.addMessage(str(len(fieldValues)) + " All : " + str(len(uniqueValues)) + " Unique") gzSupport.addError(str(delta) + " Duplicates found, results located in " + gzSupport.errorTableName) for x in diffValues: gzSupport.logProcessError(table,field,str(x),field,"Duplicate Value:" + str(x)) success = False elif delta == 0: gzSupport.addMessage("No Duplicates found") return success
def findDuplicates(dataset,table,field): success = True uValues = gzSupport.getFieldValues("Unique",[field],[dataset]) uniqueValues = uValues[0] diffValues = uValues[1] #fieldValues = gzSupport.getFieldValues("All",[field],[dataset])[0] delta = len(diffValues) if delta > 0: count = int(arcpy.GetCount_management(table).getOutput(0)) gzSupport.addMessage(str(count) + " rows : " + str(len(uniqueValues)) + " Unique") gzSupport.addError(str(delta) + " Duplicates found, results located in " + gzSupport.errorTableName) for x in diffValues: gzSupport.logProcessError(table,field,str(x),field,"Duplicate Value:" + str(x)) success = False elif delta == 0: gzSupport.addMessage("No Duplicates found") return success
def calcValue(row, attrs, calcString):
    # calculate a value based on fields and or other expressions
    if calcString.find("|") > -1:
        calcList = calcString.split("|")
    else:
        calcList = calcString.split("!")
    outVal = ""
    for strVal in calcList:
        if strVal in attrs:
            outVal += str(row.getValue(strVal))
        else:
            outVal += strVal
    try:
        outVal = eval(outVal)
    except:
        gzSupport.addMessage("Error evaluating:" + outVal)
        gzSupport.showTraceback()
        gzSupport.addError("Error calculating field values:" + outVal)
    return outVal
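# A minimal illustration of the calcValue template logic above, using a
# stand-in row object (real callers pass an arcpy cursor row; the WIDTH
# field and its value are hypothetical):
class _FakeRow:
    def __init__(self, values):
        self.values = values
    def getValue(self, name):
        return self.values[name]

_row = _FakeRow({"WIDTH": 4})
# "!WIDTH! * 2" splits on "!"; the piece matching a field name is replaced
# by the row value, the rest is kept verbatim, and "4 * 2" is eval()'d to 8
print(calcValue(_row, ["WIDTH"], "!WIDTH! * 2"))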
def main(argv = None):
    success = True
    targetName = ''
    try:
        if not arcpy.Exists(gzSupport.workspace):
            gzSupport.addMessage(gzSupport.workspace + " does not exist, attempting to create")
            gzSupport.createGizintaGeodatabase()
        else:
            gzSupport.compressGDB(gzSupport.workspace)
        if len(datasets) > 0:
            progBar = len(datasets) + 1
            arcpy.SetProgressor("step", "Importing Datasets...", 0,progBar, 1)
            #gzSupport.deleteExistingRows(datasets)
            arcpy.SetProgressorPosition()
        for dataset in datasets:
            gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
            sourceName = dataset.getAttributeNode("sourceName").nodeValue
            targetName = dataset.getAttributeNode("targetName").nodeValue
            xmlFields = gzSupport.getXmlElements(gzSupport.xmlFileName,"Field")
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " + targetName + "...")
            if not arcpy.Exists(os.path.join(sourceWorkspace,sourceName)):
                gzSupport.addError(os.path.join(sourceWorkspace,sourceName) + " does not exist, exiting")
                return
            if not arcpy.Exists(os.path.join(gzSupport.workspace,targetName)):
                gzSupport.addMessage(os.path.join(gzSupport.workspace,targetName) + " does not exist")
            else:
                arcpy.Delete_management(os.path.join(gzSupport.workspace,targetName))
            arcpy.env.workspace = gzSupport.workspace
            try:
                retVal = gzSupport.exportDataset(sourceWorkspace,sourceName,targetName,dataset, xmlFields)
                if retVal == False:
                    success = False
            except:
                gzSupport.showTraceback()
                success = False
                retVal = False
            gzSupport.logDatasetProcess(sourceName,targetName,retVal)
            arcpy.SetProgressorPosition()
    except:
        gzSupport.showTraceback()
        gzSupport.addError("A Fatal Error occurred")
        success = False
        gzSupport.logDatasetProcess("extractWorkspaceToGDB",targetName,False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        try:
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
            arcpy.ClearWorkspaceCache_management(sourceWorkspace)
        except:
            gzSupport.addMessage("Unable to clear workspace cache, continuing")
        if success == False:
            gzSupport.addError("Errors occurred during process, look in log files for more information")
        if gzSupport.ignoreErrors == True:
            success = True
        gzSupport.closeLog()
        arcpy.SetParameter(SUCCESS, success)
def main(argv=None):
    success = True
    name = ''
    try:
        if not arcpy.Exists(gzSupport.workspace):
            gzSupport.addMessage(gzSupport.workspace + " does not exist, attempting to create")
            gzSupport.createGizintaGeodatabase()
        else:
            gzSupport.compressGDB(gzSupport.workspace)
        if len(datasets) > 0:
            progBar = len(datasets) + 1
            arcpy.SetProgressor("step", "Importing Layers...", 0, progBar, 1)
            arcpy.SetProgressorPosition()
        for dataset in datasets:
            gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
            sourceName = dataset.getAttributeNode("sourceName").nodeValue
            targetName = dataset.getAttributeNode("targetName").nodeValue
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " + targetName + "...")
            if not arcpy.Exists(sourceLayer):
                gzSupport.addError("Layer " + sourceLayer + " does not exist, exiting")
                return
            target = os.path.join(gzSupport.workspace, targetName)
            arcpy.env.workspace = gzSupport.workspace
            if not arcpy.Exists(target):
                gzSupport.addMessage("Feature Class " + target + " does not exist")
            else:
                arcpy.Delete_management(target)
            try:
                retVal = exportDataset(sourceLayer, targetName, dataset)
                if retVal == False:
                    success = False
            except:
                gzSupport.showTraceback()
                success = False
                retVal = False
            gzSupport.logDatasetProcess(sourceName, targetName, retVal)
            arcpy.SetProgressorPosition()
    except:
        gzSupport.addError("A Fatal Error occurred")
        gzSupport.showTraceback()
        success = False
        gzSupport.logDatasetProcess("extractLayerToGDB", name, False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
        if success == False:
            gzSupport.addError("Errors occurred during process, look in log files for more information")
        if gzSupport.ignoreErrors == True:
            success = True
        gzSupport.closeLog()
        arcpy.SetParameter(SUCCESS, success)
def main(argv=None):
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    tables = gzSupport.listDatasets(gzSupport.workspace)
    tNames = tables[0]
    tFullNames = tables[1]
    name = ''
    for dataset in datasets:
        arcpy.env.workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        table = gzSupport.getFullName(name, tNames, tFullNames)
        gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
        gzSupport.sourceNameField = dataset.getAttributeNode("sourceNameField").nodeValue
        if not arcpy.Exists(table):
            gzSupport.addError("Feature Class " + table + " does not exist, exiting")
            arcpy.SetParameter(SUCCESS, False)
            return
        if not arcpy.TestSchemaLock(table):
            gzSupport.addError("Unable to obtain a schema lock for " + table + ", exiting")
            arcpy.SetParameter(SUCCESS, False)
            return -1
        desc = arcpy.Describe(table)
        fields = dataset.getElementsByTagName("Field")
        try:
            attrs = [f.name for f in arcpy.ListFields(table)]
            for field in fields:
                arcpy.env.workspace = gzSupport.workspace
                targetName = gzSupport.getNodeValue(field, "TargetName")
                gzSupport.addGizintaField(table, targetName, field, attrs)
            retVal = setFieldValues(table, fields)
            if retVal == False:
                success = False
            gzSupport.logDatasetProcess(name, "Fields", retVal)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
            gzSupport.cleanupGarbage()
        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("fieldCalculator", name, False)
        finally:
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log file tools\\log\\fieldCalculator.log for more information")
    if gzSupport.ignoreErrors == True:
        success = True
    arcpy.SetParameter(SUCCESS, success)
    arcpy.ResetProgressor()
    gzSupport.closeLog()
    return
def joinToCsv(view, dataset, cadPath, cadName): retVal = False joinTo = "" if hasJoinTo(dataset) == True: try: joinTo = dataset.getAttributeNode("joinTo").nodeValue cadPart0 = cadName.split(".dwg")[0] csvFile = os.path.join(cadPath, cadPart0, cadPart0 + joinTo) if joinTo and joinTo != "": cadKey = dataset.getAttributeNode("cadKey").nodeValue csvKey = dataset.getAttributeNode("csvKey").nodeValue prefix = dataset.getAttributeNode("fieldPrefix").nodeValue tempTable = os.path.join(gzSupport.workspace, prefix) # Create temporary table if arcpy.Exists(tempTable): arcpy.Delete_management(tempTable) if os.path.isfile(csvFile) == True: arcpy.CopyRows_management(csvFile, tempTable) arcpy.AddJoin_management(view, cadKey, tempTable, csvKey) retVal = True else: err = "Missing csv file - " + csvFile gzSupport.addError(err) gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name, csvFile, err) retVal = False except: err = "Unable to create join for " + name + ", " + csvFile gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name, csvFile, err) gzSupport.addError(err) gzSupport.showTraceback() retVal = False #finally: # if arcpy.Exists(tempTable): # arcpy.Delete_management(tempTable) return [retVal, view]
def main(argv=None): success = True if not arcpy.Exists(gzSupport.workspace): gzSupport.addMessage(gzSupport.workspace + " does not exist, attempting to create") gzSupport.createGizintaGeodatabase() else: gzSupport.compressGDB(gzSupport.workspace) arcpy.ClearWorkspaceCache_management(gzSupport.workspace) try: gzSupport.addMessage("Looking for drawings modified since " + since) minTime = datetime.datetime.strptime(since, "%d/%m/%Y %I:%M:%S %p") cadFiles = gzSupport.getFileList(cadFolder, cadExt, minTime) if len(cadFiles) > 0: progBar = len(cadFiles) + 1 arcpy.SetProgressor("step", "Importing Drawings...", 0, progBar, 1) arcpy.SetProgressorPosition() gzSupport.deleteExistingRows(datasets) for item in cadFiles: cadPath = item[0] cadName = item[1] gzSupport.addMessage("Importing Drawing " + cadName) for dataset in datasets: try: name = dataset.getAttributeNode("sourceName").nodeValue except: name = dataset.getAttributeNode("name").nodeValue gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue xmlFields = gzSupport.getXmlElements(gzSupport.xmlFileName, "Field") arcpy.SetProgressorLabel("Loading " + name + " for " + cadName + "...") arcpy.env.Workspace = gzSupport.workspace targetName = dataset.getAttributeNode("targetName").nodeValue sourceWorkspace = os.path.join(cadPath, cadName) exists = False if not arcpy.Exists(os.path.join(gzSupport.workspace, targetName)): gzSupport.addMessage(os.path.join(gzSupport.workspace, targetName) + " does not exist") else: exists = True # arcpy.Delete_management(os.path.join(gzSupport.workspace,targetName)) try: if not exists == True: retVal = gzSupport.exportDataset(sourceWorkspace, name, targetName, dataset, xmlFields) addDrawingField(os.path.join(gzSupport.workspace, targetName), cadName) else: retVal = importLayer(cadPath, cadName, dataset) addDrawingField(os.path.join(gzSupport.workspace, targetName), cadName) if retVal == False: success = False except: gzSupport.showTraceback() success = False retVal = False arcpy.env.Workspace = gzSupport.workspace gzSupport.logDatasetProcess(cadName, name, retVal) gzSupport.cleanupGarbage() arcpy.SetProgressorPosition() except: gzSupport.addError("A Fatal Error occurred") gzSupport.showTraceback() success = False gzSupport.logDatasetProcess("", "", False) finally: arcpy.ResetProgressor() arcpy.RefreshCatalog(gzSupport.workspace) arcpy.ClearWorkspaceCache_management(gzSupport.workspace) gzSupport.cleanupGarbage() if success == False: gzSupport.addError("Errors occurred during process, look in log files for more information") if gzSupport.ignoreErrors == True: success = True gzSupport.closeLog() arcpy.SetParameter(SUCCESS, success)
def main(argv=None): success = True if not arcpy.Exists(gzSupport.workspace): gzSupport.addMessage(gzSupport.workspace + " does not exist, attempting to create") gzSupport.createGizintaGeodatabase() else: gzSupport.compressGDB(gzSupport.workspace) arcpy.ClearWorkspaceCache_management(gzSupport.workspace) try: gzSupport.addMessage("Looking for drawings modified since " + since) minTime = dt.datetime.strptime(since, "%d/%m/%Y %I:%M:%S %p") cadFiles = getFileList(cadFolder, cadExt, minTime) if len(cadFiles) > 0: progBar = len(cadFiles) + 1 arcpy.SetProgressor("step", "Importing Drawings...", 0, progBar, 1) deleteExistingRows(datasets) arcpy.SetProgressorPosition() for item in cadFiles: cadPath = item[0] cadName = item[1] gzSupport.addMessage("Importing Drawing " + cadName) for dataset in datasets: try: name = dataset.getAttributeNode("sourceName").nodeValue except: name = dataset.getAttributeNode("name").nodeValue gzSupport.sourceIDField = dataset.getAttributeNode( "sourceIDField").nodeValue arcpy.SetProgressorLabel("Loading " + name + " for " + cadName + "...") arcpy.env.Workspace = gzSupport.workspace targetName = dataset.getAttributeNode("targetName").nodeValue sourceWorkspace = os.path.join(cadPath, cadName) if not arcpy.Exists( os.path.join(gzSupport.workspace, targetName)): gzSupport.addMessage( os.path.join(gzSupport.workspace, targetName) + " does not exist") mode = "export" else: mode = "import" try: if mode == "import": retVal = gzSupport.importDataset( sourceWorkspace, name, targetName, dataset) elif mode == "export": retVal = gzSupport.exportDataset( sourceWorkspace, name, targetName, dataset) #retVal = importLayer(cadPath,cadName,dataset) if retVal == False: success = False except: gzSupport.showTraceback() success = False retVal = False arcpy.env.Workspace = gzSupport.workspace gzSupport.logDatasetProcess(cadName, name, retVal) gzSupport.cleanupGarbage() arcpy.SetProgressorPosition() except: gzSupport.addError("A Fatal Error occurred") gzSupport.showTraceback() success = False gzSupport.logDatasetProcess("", "", False) finally: arcpy.ResetProgressor() arcpy.RefreshCatalog(gzSupport.workspace) arcpy.ClearWorkspaceCache_management(gzSupport.workspace) gzSupport.cleanupGarbage() if success == False: gzSupport.addError( "Errors occurred during process, look in log files for more information" ) if gzSupport.ignoreErrors == True: success = True gzSupport.closeLog() arcpy.SetParameter(SUCCESS, success)
def main(argv=None):
    global targetWorkspace
    hasVersion = False
    desc = arcpy.Describe(gzSupport.workspace)
    if desc.workspaceType != "RemoteDatabase" and versionName == None:
        targetWorkspace = defaultWorkspace
    success = True
    arcpy.ResetProgressor()
    arcpy.env.workspace = gzSupport.workspace
    uniqueValues = gzSupport.getFieldValues("Unique", fieldNames, datasets)[0]
    sources = gzSupport.listDatasets(gzSupport.workspace)
    sNames = sources[0]
    sFullNames = sources[1]
    arcpy.SetProgressor("Step", "Load by " + str(fieldNames) + "...", 0, len(uniqueValues) * len(datasets), 1)
    for value in uniqueValues:
        try:
            hasVersion = False
            retVal = False # default so the log calls below cannot see an unbound value
            gzSupport.addMessage(value)
            if desc.workspaceType == "RemoteDatabase" and versionName != None:
                arcpy.SetProgressorLabel("Creating Version " + versionName)
                hasVersion = gzSupport.createVersion(defaultWorkspace, defaultVersionName, versionName)
            if hasVersion == True or versionName == None or desc.workspaceType == "LocalDatabase":
                arcpy.env.workspace = targetWorkspace
                targets = gzSupport.listDatasets(targetWorkspace)
                tNames = targets[0]
                tFullNames = targets[1]
                for dataset in datasets:
                    name = dataset.getAttributeNode("name").nodeValue
                    arcpy.SetProgressorLabel("Loading Dataset " + name)
                    targetTable = gzSupport.getFullName(name, tNames, tFullNames)
                    sourceTable = gzSupport.getFullName(name, sNames, sFullNames)
                    attrs = [f.name for f in arcpy.ListFields(targetTable)]
                    expr = getExpression(attrs, fieldNames, value)
                    arcpy.SetProgressorLabel("Loading Dataset " + name + " Where " + expr)
                    tName = targetTable[targetTable.rfind("\\") + 1:]
                    tLocation = targetTable[0:targetTable.rfind("\\")]
                    if gzSupport.deleteRows(tLocation, tName, expr) == True:
                        retVal = gzSupport.appendRows(sourceTable, targetTable, expr)
                        if retVal == False:
                            success = False
                    else:
                        success = False
                    arcpy.SetProgressorPosition()
                if success == True:
                    if desc.workspaceType == "RemoteDatabase":
                        arcpy.SetProgressorLabel("Reconcile and Post")
                        retVal = gzSupport.reconcilePost(defaultWorkspace, versionName, defaultVersionName)
                        if retVal == False:
                            success = False
                            gzSupport.deleteVersion(defaultWorkspace, versionName)
                    elif desc.workspaceType == "LocalDatabase":
                        arcpy.SetProgressorLabel("Completed Update for " + str(value))
                    gzSupport.logDatasetProcess(targetTable, sys.argv[0], retVal)
                else:
                    gzSupport.logDatasetProcess(targetTable, sys.argv[0], retVal)
                gzSupport.cleanupGarbage()
        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("Serious error", sys.argv[0], False)
        finally:
            arcpy.SetProgressorPosition()
            arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")
    if gzSupport.ignoreErrors == True:
        success = True
    if desc.workspaceType == "RemoteDatabase" and success == True:
        analyze(defaultWorkspace, datasets, tNames, tFullNames)
    arcpy.SetParameter(SUCCESS, success)
    arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    gzSupport.compressGDB(gzSupport.workspace)
    gzSupport.compressGDB(defaultWorkspace)
    gzSupport.closeLog()
    return
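# getExpression is used above but not shown in this section. A plausible
# sketch, assuming it builds a where clause matching each load field to the
# current unique value, restricted to fields actually present in the target
# table; the exact quoting rules are an assumption:
def getExpression(attrs, fieldNames, value):
    exprs = []
    for fieldName in fieldNames:
        if fieldName in attrs:
            exprs.append("\"" + fieldName + "\" = '" + str(value) + "'")
    return " AND ".join(exprs)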
# Fragment: value-map handling for one row - applies the sourceValues/
# targetValues mapping to the current row, falling back to the "otherwise"
# expression when no map entry matches
sourceTest = None
if mapExpr and mapExpr != "":
    currentValue = calcValue(row,attrs,mapExpr)
if currentValue == sourceTest or currentValue == sourceValue:
    # this will check numeric and non-numeric equivalency for current values in value maps
    found = True
    try:
        idx = sourceValues.index(sourceValue)
        newValue = targetValues[idx]
        row.setValue(targetName,newValue)
    except:
        errCount += 1
        row.setValue(targetName,currentValue)
        success = False
        err = "Unable to map values for " + targetName + ", value = " + str(currentValue)
        gzSupport.showTraceback()
        gzSupport.addError(err)
        gzSupport.logProcessError(row.getValue(gzSupport.sourceNameField),gzSupport.sourceIDField,row.getValue(gzSupport.sourceIDField),targetName,err)
if not found:
    if otherwise and str(otherwise) != "None":
        otherwise = str(otherwise)
        if otherwise.count(" ") > 2 or otherwise.count("!") > 1:
            otherwise = calcValue(row,attrs,otherwise)
            #gzSupport.addMessage(otherwise)
        row.setValue(targetName,otherwise)
    else:
        errCount += 1
        success = False
        err = "Unable to find map value (otherwise) for " + str(targetName) + ", value = " + str(currentValue)
        gzSupport.addError(err)
        gzSupport.logProcessError(row.getValue(gzSupport.sourceNameField),gzSupport.sourceIDField,row.getValue(gzSupport.sourceIDField),targetName,err)
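# For orientation, a minimal harness the fragment above could run inside,
# assuming the classic arcpy UpdateCursor these Py2-era scripts use (row.
# getValue/row.setValue match that API); "table" names the target table:
rows = arcpy.UpdateCursor(table)
for row in rows:
    # ... value-map fragment above executes here, once per row and field ...
    rows.updateRow(row)
del rows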