def main(argv=None):
    # fieldCalculator: add any missing target fields to each dataset, then calculate field values.
    # Assumes module-level imports (arcpy, gzSupport) and globals (datasets, SUCCESS).
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    tables = gzSupport.listDatasets(gzSupport.workspace)
    tNames = tables[0]
    tFullNames = tables[1]
    name = ''
    for dataset in datasets:
        arcpy.env.workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        table = gzSupport.getFullName(name, tNames, tFullNames)
        gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
        gzSupport.sourceNameField = dataset.getAttributeNode("sourceNameField").nodeValue
        if not arcpy.Exists(table):
            gzSupport.addError("Feature Class " + table + " does not exist, exiting")
            arcpy.SetParameter(SUCCESS, False)
            return
        if not arcpy.TestSchemaLock(table):
            gzSupport.addError("Unable to obtain a schema lock for " + table + ", exiting")
            arcpy.SetParameter(SUCCESS, False)
            return -1
        fields = dataset.getElementsByTagName("Field")
        try:
            attrs = [f.name for f in arcpy.ListFields(table)]
            for field in fields:
                arcpy.env.workspace = gzSupport.workspace
                targetName = gzSupport.getNodeValue(field, "TargetName")
                gzSupport.addGizintaField(table, targetName, field, attrs)
            retVal = setFieldValues(table, fields)
            if retVal == False:
                success = False
            gzSupport.logDatasetProcess(name, "Fields", retVal)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
            gzSupport.cleanupGarbage()
        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("fieldCalculator", name, False)
        finally:
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log file tools\\log\\fieldCalculator.log for more information")
        if gzSupport.ignoreErrors == True:
            success = True
    arcpy.SetParameter(SUCCESS, success)
    arcpy.ResetProgressor()
    gzSupport.closeLog()
    return
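# ---------------------------------------------------------------------------
# A minimal sketch of the module-level setup these main() functions appear to
# rely on. This is an assumption inferred from how the globals are used, not
# the original source: the parameter indexes and example globals below are
# hypothetical, and each tool defines its own parameter set.
# ---------------------------------------------------------------------------
import os
import sys
import datetime
import datetime as dt
import arcpy
import gzSupport

SUCCESS = 3  # assumed: index of the tool's boolean "success" output parameter
debug = False
gzSupport.xmlFileName = arcpy.GetParameterAsText(0)  # XML config driving the load
datasets = gzSupport.getXmlElements(gzSupport.xmlFileName, "Dataset")
sourceGDB = arcpy.GetParameterAsText(1)  # example only; used by the replaceRows variants
targetGDB = arcpy.GetParameterAsText(2)  # example only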
def main(argv=None):
    # extractLayerToGDB: copy the configured source layer into the target geodatabase,
    # one dataset at a time. Assumes globals datasets, sourceLayer, and SUCCESS.
    success = True
    name = ''
    try:
        if not arcpy.Exists(gzSupport.workspace):
            gzSupport.addMessage(gzSupport.workspace + " does not exist, attempting to create")
            gzSupport.createGizintaGeodatabase()
        else:
            gzSupport.compressGDB(gzSupport.workspace)
        if len(datasets) > 0:
            progBar = len(datasets) + 1
            arcpy.SetProgressor("step", "Importing Layers...", 0, progBar, 1)
            arcpy.SetProgressorPosition()
        for dataset in datasets:
            gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
            sourceName = dataset.getAttributeNode("sourceName").nodeValue
            targetName = dataset.getAttributeNode("targetName").nodeValue
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " + targetName + "...")
            if not arcpy.Exists(sourceLayer):
                gzSupport.addError("Layer " + sourceLayer + " does not exist, exiting")
                return
            target = os.path.join(gzSupport.workspace, targetName)
            arcpy.env.workspace = gzSupport.workspace
            if not arcpy.Exists(target):
                gzSupport.addMessage("Feature Class " + target + " does not exist")
            else:
                arcpy.Delete_management(target)
            try:
                retVal = exportDataset(sourceLayer, targetName, dataset)
                if retVal == False:
                    success = False
            except:
                gzSupport.showTraceback()
                success = False
                retVal = False
            gzSupport.logDatasetProcess(sourceName, targetName, retVal)
            arcpy.SetProgressorPosition()
    except:
        gzSupport.addError("A Fatal Error occurred")
        gzSupport.showTraceback()
        success = False
        gzSupport.logDatasetProcess("extractLayerToGDB", name, False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
        if success == False:
            gzSupport.addError("Errors occurred during process, look in log files for more information")
            if gzSupport.ignoreErrors == True:
                success = True
        gzSupport.closeLog()
        arcpy.SetParameter(SUCCESS, success)
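# exportDataset is called above but not defined in this file. A hedged sketch of
# a plausible implementation, assuming it simply copies the source layer into the
# gizinta workspace; the real helper likely also maps fields from the XML config.
def exportDataset(sourceLayer, targetName, dataset):
    # returns True on success, False on failure, matching the retVal checks above
    try:
        arcpy.FeatureClassToFeatureClass_conversion(sourceLayer, gzSupport.workspace, targetName)
        return True
    except:
        gzSupport.showTraceback()
        return False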
def main(argv=None):
    # extractWorkspaceToGDB: export each configured dataset from the source workspace
    # into the target geodatabase. Assumes globals datasets, sourceWorkspace, and SUCCESS.
    success = True
    targetName = ''
    try:
        if not arcpy.Exists(gzSupport.workspace):
            gzSupport.addMessage(gzSupport.workspace + " does not exist, attempting to create")
            gzSupport.createGizintaGeodatabase()
        else:
            gzSupport.compressGDB(gzSupport.workspace)
        if len(datasets) > 0:
            progBar = len(datasets) + 1
            arcpy.SetProgressor("step", "Importing Datasets...", 0, progBar, 1)
            #gzSupport.deleteExistingRows(datasets)
            arcpy.SetProgressorPosition()
        for dataset in datasets:
            gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
            sourceName = dataset.getAttributeNode("sourceName").nodeValue
            targetName = dataset.getAttributeNode("targetName").nodeValue
            xmlFields = gzSupport.getXmlElements(gzSupport.xmlFileName, "Field")
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " + targetName + "...")
            if not arcpy.Exists(os.path.join(sourceWorkspace, sourceName)):
                gzSupport.addError(os.path.join(sourceWorkspace, sourceName) + " does not exist, exiting")
                return
            if not arcpy.Exists(os.path.join(gzSupport.workspace, targetName)):
                gzSupport.addMessage(os.path.join(gzSupport.workspace, targetName) + " does not exist")
            else:
                arcpy.Delete_management(os.path.join(gzSupport.workspace, targetName))
            arcpy.env.workspace = gzSupport.workspace
            try:
                retVal = gzSupport.exportDataset(sourceWorkspace, sourceName, targetName, dataset, xmlFields)
                if retVal == False:
                    success = False
            except:
                gzSupport.showTraceback()
                success = False
                retVal = False
            gzSupport.logDatasetProcess(sourceName, targetName, retVal)
            arcpy.SetProgressorPosition()
    except:
        gzSupport.showTraceback()
        gzSupport.addError("A Fatal Error occurred")
        success = False
        gzSupport.logDatasetProcess("extractWorkspaceToGDB", targetName, False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        try:
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
            arcpy.ClearWorkspaceCache_management(sourceWorkspace)
        except:
            gzSupport.addMessage("Unable to clear workspace cache, continuing")
        if success == False:
            gzSupport.addError("Errors occurred during process, look in log files for more information")
            if gzSupport.ignoreErrors == True:
                success = True
        gzSupport.closeLog()
        arcpy.SetParameter(SUCCESS, success)
def main(argv=None):
    # sourceTargetQA: run dataset-level and field-level QA checks defined in the XML config.
    # Assumes globals datasets, sourceFieldQA, targetFieldQA, and SUCCESS.
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    tables = gzSupport.listDatasets(gzSupport.workspace)
    tNames = tables[0]
    tFullNames = tables[1]
    if len(datasets) > 0:
        progBar = len(datasets)
        arcpy.SetProgressor("step", "Running QA...", 0, progBar, 1)
    for dataset in datasets:
        arcpy.env.workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
        table = gzSupport.getFullName(name, tNames, tFullNames)
        #table = os.path.join(gzSupport.workspace,name)
        fields = dataset.getElementsByTagName("Field")
        try:
            # run qa for dataset
            qaRulesDataset = dataset.getAttributeNode("qa").nodeValue
            gzSupport.addMessage("\nRunning QA (" + qaRulesDataset + ") for " + name)
            retVal = runDatasetChecks(dataset, table, qaRulesDataset)
            if retVal == False:
                success = False
            for field in fields:
                sourceQA = False
                targetQA = False
                fieldName = gzSupport.getNodeValue(field, "TargetName")
                if sourceFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    sourceQA = True
                    fieldName = gzSupport.getNodeValue(field, "SourceName")
                if targetFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    targetQA = True
                    fieldName = gzSupport.getNodeValue(field, "TargetName")
                retVal = runFieldCheck(dataset, table, field, sourceQA, targetQA)
                if retVal == False:
                    success = False
                try:
                    gzSupport.logDatasetProcess(name, fieldName, retVal)
                except:
                    gzSupport.addMessage("Process not logged for field")
            arcpy.SetProgressorPosition()
        except:
            gzSupport.showTraceback()
            gzSupport.addError("Field Check Error")
            success = False
            gzSupport.logDatasetProcess("sourceTargetQA", name, False)
        finally:
            arcpy.ResetProgressor()
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log file tools\\log\\sourceTargetQA.log for more information")
        if gzSupport.ignoreErrors == True:
            success = True
    arcpy.SetParameter(SUCCESS, success)
    gzSupport.closeLog()
    return
def main(argv=None):
    # sourceTargetQA (earlier variant): same QA flow, but joins table paths directly
    # from the workspace instead of resolving full names.
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    if len(datasets) > 0:
        progBar = len(datasets)
        arcpy.SetProgressor("step", "Running QA...", 0, progBar, 1)
    for dataset in datasets:
        arcpy.env.workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
        table = os.path.join(gzSupport.workspace, name)
        fields = dataset.getElementsByTagName("Field")
        try:
            # run qa for dataset
            qaRulesDataset = dataset.getAttributeNode("qa").nodeValue
            gzSupport.addMessage("\nRunning QA (" + qaRulesDataset + ") for " + name)
            retVal = runDatasetChecks(dataset, table, qaRulesDataset)
            if retVal == False:
                success = False
            for field in fields:
                sourceQA = False
                targetQA = False
                fieldName = ""
                if sourceFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    sourceQA = True
                    fieldName = gzSupport.getNodeValue(field, "SourceName")
                if targetFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    targetQA = True
                    fieldName = gzSupport.getNodeValue(field, "TargetName")
                retVal = runFieldCheck(dataset, table, field, sourceQA, targetQA)
                if retVal == False:
                    success = False
                gzSupport.logDatasetProcess(name, fieldName, retVal)
            arcpy.SetProgressorPosition()
        except:
            gzSupport.showTraceback()
            gzSupport.addError("Field Check Error")
            success = False
            gzSupport.logDatasetProcess(name, "", False)
        finally:
            arcpy.ResetProgressor()
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")
        if gzSupport.ignoreErrors == True:
            success = True
    arcpy.SetParameter(SUCCESS, success)
    gzSupport.closeLog()
    return
def main(argv=None):
    # main function - list the source and target datasets, then delete rows/append where there is a match on non-prefixed name
    # Assumes globals sourceGDB, targetGDB, datasetNames, debug, and SUCCESS.
    success = True
    name = ''
    try:
        sources = gzSupport.listDatasets(sourceGDB)
        sNames = sources[0]
        sFullNames = sources[1]
        targets = gzSupport.listDatasets(targetGDB)
        tNames = targets[0]
        tFullNames = targets[1]
        s = 0
        arcpy.SetProgressor("Step", "Replacing rows...", 0, len(sFullNames), 1)
        for name in sNames:
            arcpy.SetProgressorPosition(s)
            arcpy.SetProgressorLabel(" Replacing rows using " + name + "...")
            # for each source name
            if debug:
                gzSupport.addMessage(name)
            try:
                # look for the matching name in target names
                t = tNames.index(name)
            except:
                # will get here if no match
                t = -1
            if t > -1:
                # append if there is a match
                if len(datasetNames) == 0 or name.upper() in datasetNames:
                    retVal = doInlineAppend(sFullNames[s], tFullNames[t])
                    gzSupport.logDatasetProcess(name, "replaceRows", retVal)
                    if retVal == False:
                        success = False
                else:
                    gzSupport.addMessage("Skipping " + name)
            s = s + 1
    except:
        gzSupport.showTraceback()
        arcpy.AddError("Unable to update datasets")
        success = False
        gzSupport.logDatasetProcess(name, "replaceRows", success)
    finally:
        arcpy.ResetProgressor()
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ClearWorkspaceCache_management(targetGDB)
        gzSupport.compressGDB(targetGDB)
        gzSupport.closeLog()
def main(argv=None):
    # main function - list the source and target datasets, then delete rows/append where there is a match on non-prefixed name
    success = True
    name = ''
    try:
        if len(datasetNames) == 0:
            sources = gzSupport.listDatasets(sourceGDB)
            sNames = sources[0]
            sFullNames = sources[1]
            targets = gzSupport.listDatasets(targetGDB)
            tNames = targets[0]
            tFullNames = targets[1]
        else:
            sNames = datasetNames
        s = 0
        arcpy.SetProgressor("Step", "Replacing rows...", 0, len(sNames), 1)
        for name in sNames:
            arcpy.SetProgressorPosition(s)
            arcpy.SetProgressorLabel(" Replacing rows using " + name + "...")
            # for each source name
            if debug:
                gzSupport.addMessage(name)
            target = os.path.join(targetGDB, name)
            if arcpy.Exists(target):
                # append if there is a match
                if len(datasetNames) == 0 or gzSupport.nameTrimmer(name) in datasetNames:
                    retVal = doInlineAppend(os.path.join(sourceGDB, name), target)
                    gzSupport.logDatasetProcess("replaceRows", name, retVal)
                    if retVal == False:
                        success = False
                    gzSupport.cleanupGarbage()
                else:
                    gzSupport.addMessage("Skipping " + gzSupport.nameTrimmer(name))
            s = s + 1
    except:
        gzSupport.showTraceback()
        arcpy.AddError("Unable to update datasets")
        success = False
        gzSupport.logDatasetProcess("replaceRows", name, success)
    finally:
        arcpy.ResetProgressor()
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ClearWorkspaceCache_management(targetGDB)
        gzSupport.compressGDB(targetGDB)
        gzSupport.closeLog()
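# doInlineAppend is called by both replaceRows variants but not defined here.
# A minimal sketch of what it presumably does - truncate the target in place,
# then append the source rows without a schema test. This is an assumption from
# the call sites, not the original implementation.
def doInlineAppend(source, target):
    # returns True on success, False on failure, matching the retVal checks above
    try:
        arcpy.DeleteRows_management(target)  # remove existing rows, keep the schema
        arcpy.Append_management([source], target, "NO_TEST")  # append without field-map test
        return True
    except:
        gzSupport.showTraceback()
        return False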
def main(argv=None):
    # Load rows by unique field value: optionally create a version, delete/append
    # matching rows per value, then reconcile and post for remote (SDE) workspaces.
    # Assumes globals datasets, fieldNames, versionName, defaultVersionName,
    # defaultWorkspace, and SUCCESS.
    global targetWorkspace
    hasVersion = False
    desc = arcpy.Describe(gzSupport.workspace)
    if desc.workspaceType != "RemoteDatabase" and versionName == None:
        targetWorkspace = defaultWorkspace
    success = True
    arcpy.ResetProgressor()
    arcpy.env.workspace = gzSupport.workspace
    uniqueValues = gzSupport.getFieldValues("Unique", fieldNames, datasets)[0]
    sources = gzSupport.listDatasets(gzSupport.workspace)
    sNames = sources[0]
    sFullNames = sources[1]
    arcpy.SetProgressor("Step", "Load by " + str(fieldNames) + "...", 0, len(uniqueValues) * len(datasets), 1)
    for value in uniqueValues:
        try:
            hasVersion = False
            gzSupport.addMessage(value)
            if desc.workspaceType == "RemoteDatabase" and versionName != None:
                arcpy.SetProgressorLabel("Creating Version " + versionName)
                hasVersion = gzSupport.createVersion(defaultWorkspace, defaultVersionName, versionName)
            if hasVersion == True or versionName == None or desc.workspaceType == "LocalDatabase":
                arcpy.env.workspace = targetWorkspace
                targets = gzSupport.listDatasets(targetWorkspace)
                tNames = targets[0]
                tFullNames = targets[1]
                for dataset in datasets:
                    name = dataset.getAttributeNode("name").nodeValue
                    arcpy.SetProgressorLabel("Loading Dataset " + name)
                    targetTable = gzSupport.getFullName(name, tNames, tFullNames)
                    sourceTable = gzSupport.getFullName(name, sNames, sFullNames)
                    attrs = [f.name for f in arcpy.ListFields(targetTable)]
                    expr = getExpression(attrs, fieldNames, value)
                    arcpy.SetProgressorLabel("Loading Dataset " + name + " Where " + expr)
                    tName = targetTable[targetTable.rfind("\\") + 1:]
                    tLocation = targetTable[0:targetTable.rfind("\\")]
                    if gzSupport.deleteRows(tLocation, tName, expr) == True:
                        retVal = gzSupport.appendRows(sourceTable, targetTable, expr)
                        if retVal == False:
                            success = False
                    else:
                        success = False
                    arcpy.SetProgressorPosition()
                if success == True:
                    if desc.workspaceType == "RemoteDatabase":
                        arcpy.SetProgressorLabel("Reconcile and Post")
                        retVal = gzSupport.reconcilePost(defaultWorkspace, versionName, defaultVersionName)
                        if retVal == False:
                            success = False
                            gzSupport.deleteVersion(defaultWorkspace, versionName)
                    elif desc.workspaceType == "LocalDatabase":
                        arcpy.SetProgressorLabel("Completed Update for " + str(value))
                    gzSupport.logDatasetProcess(targetTable, sys.argv[0], retVal)
                else:
                    gzSupport.logDatasetProcess(targetTable, sys.argv[0], retVal)
            gzSupport.cleanupGarbage()
        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("Serious error", sys.argv[0], False)
        finally:
            arcpy.SetProgressorPosition()
            arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")
        if gzSupport.ignoreErrors == True:
            success = True
    if desc.workspaceType == "RemoteDatabase" and success == True:
        analyze(defaultWorkspace, datasets, tNames, tFullNames)
    arcpy.SetParameter(SUCCESS, success)
    arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    gzSupport.compressGDB(gzSupport.workspace)
    gzSupport.compressGDB(defaultWorkspace)
    gzSupport.closeLog()
    return
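# getExpression is referenced above but not defined in this file. A hedged sketch
# of a plausible implementation: build a SQL where clause matching each load field
# against the current unique value, quoting strings. The tuple handling and quoting
# rules here are assumptions, not the original helper.
def getExpression(attrs, fieldNames, value):
    parts = []
    values = value if isinstance(value, (list, tuple)) else [value]
    for fieldName, val in zip(fieldNames, values):
        if fieldName in attrs:  # only filter on fields present in the target table
            if isinstance(val, str):
                parts.append(fieldName + " = '" + val + "'")
            else:
                parts.append(fieldName + " = " + str(val))
    return " AND ".join(parts)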
def main(argv=None):
    # CAD import: load drawings modified since a given time into the geodatabase,
    # exporting new feature classes or importing into existing ones.
    # Assumes globals datasets, since, cadFolder, cadExt, and SUCCESS, plus local
    # helpers getFileList and deleteExistingRows.
    success = True
    if not arcpy.Exists(gzSupport.workspace):
        gzSupport.addMessage(gzSupport.workspace + " does not exist, attempting to create")
        gzSupport.createGizintaGeodatabase()
    else:
        gzSupport.compressGDB(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        gzSupport.addMessage("Looking for drawings modified since " + since)
        minTime = dt.datetime.strptime(since, "%d/%m/%Y %I:%M:%S %p")
        cadFiles = getFileList(cadFolder, cadExt, minTime)
        if len(cadFiles) > 0:
            progBar = len(cadFiles) + 1
            arcpy.SetProgressor("step", "Importing Drawings...", 0, progBar, 1)
            deleteExistingRows(datasets)
            arcpy.SetProgressorPosition()
        for item in cadFiles:
            cadPath = item[0]
            cadName = item[1]
            gzSupport.addMessage("Importing Drawing " + cadName)
            for dataset in datasets:
                try:
                    name = dataset.getAttributeNode("sourceName").nodeValue
                except:
                    name = dataset.getAttributeNode("name").nodeValue
                gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
                arcpy.SetProgressorLabel("Loading " + name + " for " + cadName + "...")
                arcpy.env.workspace = gzSupport.workspace
                targetName = dataset.getAttributeNode("targetName").nodeValue
                sourceWorkspace = os.path.join(cadPath, cadName)
                if not arcpy.Exists(os.path.join(gzSupport.workspace, targetName)):
                    gzSupport.addMessage(os.path.join(gzSupport.workspace, targetName) + " does not exist")
                    mode = "export"
                else:
                    mode = "import"
                try:
                    if mode == "import":
                        retVal = gzSupport.importDataset(sourceWorkspace, name, targetName, dataset)
                    elif mode == "export":
                        retVal = gzSupport.exportDataset(sourceWorkspace, name, targetName, dataset)
                    #retVal = importLayer(cadPath,cadName,dataset)
                    if retVal == False:
                        success = False
                except:
                    gzSupport.showTraceback()
                    success = False
                    retVal = False
                arcpy.env.workspace = gzSupport.workspace
                gzSupport.logDatasetProcess(cadName, name, retVal)
            gzSupport.cleanupGarbage()
            arcpy.SetProgressorPosition()
    except:
        gzSupport.addError("A Fatal Error occurred")
        gzSupport.showTraceback()
        success = False
        gzSupport.logDatasetProcess("", "", False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
        gzSupport.cleanupGarbage()
        if success == False:
            gzSupport.addError("Errors occurred during process, look in log files for more information")
            if gzSupport.ignoreErrors == True:
                success = True
        gzSupport.closeLog()
        arcpy.SetParameter(SUCCESS, success)
def main(argv=None):
    # Earlier CAD import variant: exports new feature classes or appends to existing
    # ones via importLayer, tagging rows with the source drawing via addDrawingField.
    success = True
    if not arcpy.Exists(gzSupport.workspace):
        gzSupport.addMessage(gzSupport.workspace + " does not exist, attempting to create")
        gzSupport.createGizintaGeodatabase()
    else:
        gzSupport.compressGDB(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        gzSupport.addMessage("Looking for drawings modified since " + since)
        minTime = datetime.datetime.strptime(since, "%d/%m/%Y %I:%M:%S %p")
        cadFiles = gzSupport.getFileList(cadFolder, cadExt, minTime)
        if len(cadFiles) > 0:
            progBar = len(cadFiles) + 1
            arcpy.SetProgressor("step", "Importing Drawings...", 0, progBar, 1)
            arcpy.SetProgressorPosition()
            gzSupport.deleteExistingRows(datasets)
        for item in cadFiles:
            cadPath = item[0]
            cadName = item[1]
            gzSupport.addMessage("Importing Drawing " + cadName)
            for dataset in datasets:
                try:
                    name = dataset.getAttributeNode("sourceName").nodeValue
                except:
                    name = dataset.getAttributeNode("name").nodeValue
                gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
                xmlFields = gzSupport.getXmlElements(gzSupport.xmlFileName, "Field")
                arcpy.SetProgressorLabel("Loading " + name + " for " + cadName + "...")
                arcpy.env.workspace = gzSupport.workspace
                targetName = dataset.getAttributeNode("targetName").nodeValue
                sourceWorkspace = os.path.join(cadPath, cadName)
                exists = False
                if not arcpy.Exists(os.path.join(gzSupport.workspace, targetName)):
                    gzSupport.addMessage(os.path.join(gzSupport.workspace, targetName) + " does not exist")
                else:
                    exists = True
                    # arcpy.Delete_management(os.path.join(gzSupport.workspace,targetName))
                try:
                    if not exists == True:
                        retVal = gzSupport.exportDataset(sourceWorkspace, name, targetName, dataset, xmlFields)
                        addDrawingField(os.path.join(gzSupport.workspace, targetName), cadName)
                    else:
                        retVal = importLayer(cadPath, cadName, dataset)
                        addDrawingField(os.path.join(gzSupport.workspace, targetName), cadName)
                    if retVal == False:
                        success = False
                except:
                    gzSupport.showTraceback()
                    success = False
                    retVal = False
                arcpy.env.workspace = gzSupport.workspace
                gzSupport.logDatasetProcess(cadName, name, retVal)
            gzSupport.cleanupGarbage()
            arcpy.SetProgressorPosition()
    except:
        gzSupport.addError("A Fatal Error occurred")
        gzSupport.showTraceback()
        success = False
        gzSupport.logDatasetProcess("", "", False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
        gzSupport.cleanupGarbage()
        if success == False:
            gzSupport.addError("Errors occurred during process, look in log files for more information")
            if gzSupport.ignoreErrors == True:
                success = True
        gzSupport.closeLog()
        arcpy.SetParameter(SUCCESS, success)
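# getFileList (and gzSupport.getFileList) is assumed to scan a folder tree for CAD
# files newer than minTime, returning [cadPath, cadName] pairs as consumed above.
# A minimal sketch under that assumption; the real helper may differ.
def getFileList(folder, ext, minTime):
    cadFiles = []
    for path, dirs, files in os.walk(folder):
        for fileName in files:
            if fileName.lower().endswith(ext.lower()):
                modified = datetime.datetime.fromtimestamp(os.path.getmtime(os.path.join(path, fileName)))
                if modified > minTime:
                    cadFiles.append([path, fileName])  # [cadPath, cadName] pairs
    return cadFiles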