def main(argv = None):
    success = True
    name = ''
    gzSupport.compressGDB(gzSupport.workspace)
    tables = gzSupport.listDatasets(gzSupport.workspace)
    tNames = tables[0]
    tFullNames = tables[1]
    if len(datasets) > 0:
        progBar = len(datasets)
        arcpy.SetProgressor("step", "Running QA...", 0, progBar, 1)
        for dataset in datasets:
            arcpy.env.Workspace = gzSupport.workspace
            name = dataset.getAttributeNode("name").nodeValue
            gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
            table = gzSupport.getFullName(name, tNames, tFullNames)
            #table = os.path.join(gzSupport.workspace,name)
            fields = dataset.getElementsByTagName("Field")
            try:
                # run qa for dataset
                qaRulesDataset = dataset.getAttributeNode("qa").nodeValue
                gzSupport.addMessage("\nRunning QA (" + qaRulesDataset + ") for " + name)
                retVal = runDatasetChecks(dataset, table, qaRulesDataset)
                if retVal == False:
                    success = False
                for field in fields:
                    sourceQA = False
                    targetQA = False
                    fieldName = gzSupport.getNodeValue(field, "TargetName")
                    if sourceFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                        sourceQA = True
                        fieldName = gzSupport.getNodeValue(field, "SourceName")
                    if targetFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                        targetQA = True
                        fieldName = gzSupport.getNodeValue(field, "TargetName")
                    retVal = runFieldCheck(dataset, table, field, sourceQA, targetQA)
                    if retVal == False:
                        success = False
                    try:
                        gzSupport.logDatasetProcess(name, fieldName, retVal)
                    except:
                        gzSupport.addMessage("Process not logged for field")
                arcpy.SetProgressorPosition()
            except:
                gzSupport.showTraceback()
                gzSupport.addError("Field Check Error")
                success = False
                gzSupport.logDatasetProcess("sourceTargetQA", name, False)
            finally:
                arcpy.ResetProgressor()
                arcpy.RefreshCatalog(table)
                arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log file tools\\log\\sourceTargetQA.log for more information")
    if gzSupport.ignoreErrors == True:
        success = True
    arcpy.SetParameter(SUCCESS, success)
    gzSupport.closeLog()
    return
def main(argv = None):
    # main function - list the datasets and delete rows
    success = True
    name = ''
    try:
        names = gzSupport.listDatasets(sourceGDB)
        tNames = names[0]
        tFullNames = names[1]
        arcpy.SetProgressor("Step", "Deleting rows...", 0, len(tFullNames), 1)
        i = 0
        for name in tFullNames:
            arcpy.SetProgressorPosition(i)
            arcpy.SetProgressorLabel(" Deleting rows in " + name + "...")
            # for each full name
            if len(datasetNames) == 0 or tNames[i].upper() in datasetNames:
                retVal = doTruncate(name)
                gzSupport.logDatasetProcess(name, "deleteRowsGDB", retVal)
                if retVal == False:
                    success = False
            else:
                gzSupport.addMessage("Skipping " + tNames[i])
            i += 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Failed to delete rows")
        success = False
        gzSupport.logDatasetProcess(name, "deleteRowsGDB", success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(sourceGDB)
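# doTruncate is defined elsewhere in this script; a minimal sketch consistent with the
# call above (hypothetical; assumes the script's existing arcpy and gzSupport imports):
def doTruncate(name):
    # Truncate is fastest but needs exclusive access; fall back to a row delete.
    try:
        if arcpy.TestSchemaLock(name):
            arcpy.TruncateTable_management(name)
        else:
            arcpy.DeleteRows_management(name)
        return True
    except:
        gzSupport.showTraceback()
        return False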
def main(argv = None):
    startTime = gzSupport.getDBTime()
    gzSupport.addMessage(startTime)
    success = True
    OpenBrowserURL(gzSupport.xmlFileName)
    gzSupport.closeLog()
    return
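# OpenBrowserURL is defined elsewhere; a minimal sketch matching the single-argument
# call above, using the standard-library webbrowser module (the real helper may build
# a viewer page around the XML first):
import webbrowser

def OpenBrowserURL(xmlFileName):
    # Open the project XML in the default browser via a file:// URL.
    webbrowser.open("file:///" + xmlFileName.replace("\\", "/"))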
def main(argv=None):
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    tables = gzSupport.listDatasets(gzSupport.workspace)
    tNames = tables[0]
    tFullNames = tables[1]
    name = ''
    for dataset in datasets:
        arcpy.env.Workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        table = gzSupport.getFullName(name, tNames, tFullNames)
        gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
        gzSupport.sourceNameField = dataset.getAttributeNode("sourceNameField").nodeValue
        if not arcpy.Exists(table):
            gzSupport.addError("Feature Class " + table + " does not exist, exiting")
            arcpy.SetParameter(SUCCESS, False)
            return
        if not arcpy.TestSchemaLock(table):
            gzSupport.addError("Unable to obtain a schema lock for " + table + ", exiting")
            arcpy.SetParameter(SUCCESS, False)
            return -1
        desc = arcpy.Describe(table)
        fields = dataset.getElementsByTagName("Field")
        try:
            attrs = [f.name for f in arcpy.ListFields(table)]
            for field in fields:
                arcpy.env.Workspace = gzSupport.workspace
                targetName = gzSupport.getNodeValue(field, "TargetName")
                gzSupport.addGizintaField(table, targetName, field, attrs)
            retVal = setFieldValues(table, fields)
            if retVal == False:
                success = False
            gzSupport.logDatasetProcess(name, "Fields", retVal)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
            gzSupport.cleanupGarbage()
        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("fieldCalculator", name, False)
        finally:
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log file tools\\log\\fieldCalculator.log for more information")
    if gzSupport.ignoreErrors == True:
        success = True
    arcpy.SetParameter(SUCCESS, success)
    arcpy.ResetProgressor()
    gzSupport.closeLog()
    return
def main(argv=None):
    success = True
    name = ''
    try:
        if not arcpy.Exists(gzSupport.workspace):
            gzSupport.addMessage(gzSupport.workspace + " does not exist, attempting to create")
            gzSupport.createGizintaGeodatabase()
        else:
            gzSupport.compressGDB(gzSupport.workspace)
        if len(datasets) > 0:
            progBar = len(datasets) + 1
            arcpy.SetProgressor("step", "Importing Layers...", 0, progBar, 1)
            arcpy.SetProgressorPosition()
            for dataset in datasets:
                gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
                sourceName = dataset.getAttributeNode("sourceName").nodeValue
                targetName = dataset.getAttributeNode("targetName").nodeValue
                arcpy.SetProgressorLabel("Loading " + sourceName + " to " + targetName + "...")
                if not arcpy.Exists(sourceLayer):
                    gzSupport.addError("Layer " + sourceLayer + " does not exist, exiting")
                    return
                target = os.path.join(gzSupport.workspace, targetName)
                arcpy.env.Workspace = gzSupport.workspace
                if not arcpy.Exists(target):
                    gzSupport.addMessage("Feature Class " + target + " does not exist")
                else:
                    arcpy.Delete_management(target)
                try:
                    retVal = exportDataset(sourceLayer, targetName, dataset)
                    if retVal == False:
                        success = False
                except:
                    gzSupport.showTraceback()
                    success = False
                    retVal = False
                gzSupport.logDatasetProcess(sourceName, targetName, retVal)
                arcpy.SetProgressorPosition()
    except:
        gzSupport.addError("A Fatal Error occurred")
        gzSupport.showTraceback()
        success = False
        gzSupport.logDatasetProcess("extractLayerToGDB", name, False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
        if success == False:
            gzSupport.addError("Errors occurred during process, look in log files for more information")
        if gzSupport.ignoreErrors == True:
            success = True
        gzSupport.closeLog()
        arcpy.SetParameter(SUCCESS, success)
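# exportDataset is defined elsewhere in this script; a minimal sketch consistent with
# how it is called above (hypothetical; the real version applies the XML field
# mappings from the dataset node, which this sketch ignores):
def exportDataset(sourceLayer, targetName, dataset):
    # Copy the source layer into the gizinta workspace under the target name.
    try:
        arcpy.FeatureClassToFeatureClass_conversion(sourceLayer, gzSupport.workspace, targetName)
        return True
    except:
        gzSupport.showTraceback()
        return False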
def main(argv = None):
    # main function - list the datasets and delete rows
    success = True
    name = ''
    gzSupport.workspace = sourceGDB
    try:
        if len(datasetNames) == 0:
            names = gzSupport.listDatasets(sourceGDB)
            tNames = names[0]
        else:
            tNames = datasetNames
        arcpy.SetProgressor("Step", "Deleting rows...", 0, len(tNames), 1)
        i = 0
        for name in tNames:
            arcpy.SetProgressorPosition(i)
            arcpy.SetProgressorLabel(" Deleting rows in " + name + "...")
            # for each full name
            if len(datasetNames) == 0 or gzSupport.nameTrimmer(name.upper()) in datasetNames:
                retVal = doTruncate(os.path.join(sourceGDB, name))
                gzSupport.logDatasetProcess("deleteRowsGDB", name, retVal)
                if retVal == False:
                    success = False
            else:
                gzSupport.addMessage("Skipping " + gzSupport.nameTrimmer(name))
            i = i + 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Failed to delete rows")
        success = False
        gzSupport.logDatasetProcess("deleteRowsGDB", name, success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(sourceGDB)
def main(argv = None):
    success = True
    targetName = ''
    try:
        if not arcpy.Exists(gzSupport.workspace):
            gzSupport.addMessage(gzSupport.workspace + " does not exist, attempting to create")
            gzSupport.createGizintaGeodatabase()
        else:
            gzSupport.compressGDB(gzSupport.workspace)
        if len(datasets) > 0:
            progBar = len(datasets) + 1
            arcpy.SetProgressor("step", "Importing Datasets...", 0, progBar, 1)
            #gzSupport.deleteExistingRows(datasets)
            arcpy.SetProgressorPosition()
            for dataset in datasets:
                gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
                sourceName = dataset.getAttributeNode("sourceName").nodeValue
                targetName = dataset.getAttributeNode("targetName").nodeValue
                xmlFields = gzSupport.getXmlElements(gzSupport.xmlFileName, "Field")
                arcpy.SetProgressorLabel("Loading " + sourceName + " to " + targetName + "...")
                if not arcpy.Exists(os.path.join(sourceWorkspace, sourceName)):
                    gzSupport.addError(os.path.join(sourceWorkspace, sourceName) + " does not exist, exiting")
                    return
                if not arcpy.Exists(os.path.join(gzSupport.workspace, targetName)):
                    gzSupport.addMessage(os.path.join(gzSupport.workspace, targetName) + " does not exist")
                else:
                    arcpy.Delete_management(os.path.join(gzSupport.workspace, targetName))
                arcpy.env.Workspace = gzSupport.workspace
                try:
                    retVal = gzSupport.exportDataset(sourceWorkspace, sourceName, targetName, dataset, xmlFields)
                    if retVal == False:
                        success = False
                except:
                    gzSupport.showTraceback()
                    success = False
                    retVal = False
                gzSupport.logDatasetProcess(sourceName, targetName, retVal)
                arcpy.SetProgressorPosition()
    except:
        gzSupport.showTraceback()
        gzSupport.addError("A Fatal Error occurred")
        success = False
        gzSupport.logDatasetProcess("extractWorkspaceToGDB", targetName, False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        try:
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
            arcpy.ClearWorkspaceCache_management(sourceWorkspace)
        except:
            gzSupport.addMessage("Unable to clear workspace cache, continuing")
        if success == False:
            gzSupport.addError("Errors occurred during process, look in log files for more information")
        if gzSupport.ignoreErrors == True:
            success = True
        gzSupport.closeLog()
        arcpy.SetParameter(SUCCESS, success)
def main(argv = None):
    # main function - list the source and target datasets, then delete rows/append where there is a match on non-prefixed name
    arcpy.AddToolbox(os.path.join(os.path.dirname(sys.path[0]), "Gizinta.tbx"))
    success = True
    try:
        gzSupport.addMessage("Getting list of datasets for Target " + targetGDB)
        targets = gzSupport.listDatasets(targetGDB)
        tNames = targets[0]
        tFullNames = targets[1]
        gzSupport.addMessage("Getting list of datasets for Source " + sourceGDB)
        sources = gzSupport.listDatasets(sourceGDB)
        sNames = sources[0]
        sFullNames = sources[1]
        t = 0
        arcpy.SetProgressor("Step", "Creating Files...", 0, len(tNames), 1)
        for name in tNames:
            arcpy.SetProgressorPosition(t)
            arcpy.SetProgressorLabel("Creating file for " + name + "...")
            # for each target name
            if debug:
                gzSupport.addMessage(name)
            try:
                # look for the matching name in source names
                s = sNames.index(name)
            except:
                # will get here if no match
                s = -1
            if s > -1:
                # create file if there is a match
                fileName = outputFolder + os.sep + prefixStr + name.title() + ".xml"
                if os.path.exists(fileName):
                    os.remove(fileName)
                try:
                    arcpy.gzCreateProject_gizinta(sFullNames[s], tFullNames[t], fileName)
                    retVal = True
                    gzSupport.addMessage("Created " + fileName)
                except:
                    retVal = False
                if retVal == False:
                    gzSupport.addMessage("Failed to create file for " + name)
                    gzSupport.showTraceback()
                    success = False
            else:
                gzSupport.addMessage("Skipping " + name)
            t = t + 1
    except:
        gzSupport.showTraceback()
        arcpy.AddError("Error creating project files")
        success = False
    finally:
        arcpy.ResetProgressor()
        arcpy.SetParameter(gzSupport.successParameterNumber, success)
        arcpy.env.workspace = targetGDB
        arcpy.RefreshCatalog(outputFolder)
        gzSupport.closeLog()
def main(argv = None):
    success = False
    xmlStrSource = writeDocument(sourceDataset, targetDataset, xmlFileName)
    if xmlStrSource != "":
        success = True
    arcpy.SetParameter(gzSupport.successParameterNumber, success)
    arcpy.ResetProgressor()
    gzSupport.closeLog()
    return
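# writeDocument is defined elsewhere in this script; a minimal sketch of the contract
# the caller relies on (returns the XML string, or "" on failure). The element and
# attribute names below are placeholders, not the real gizinta schema:
from xml.dom import minidom

def writeDocument(sourceDataset, targetDataset, xmlFileName):
    try:
        doc = minidom.Document()
        root = doc.createElement("Project")  # placeholder root element
        root.setAttribute("source", sourceDataset)
        root.setAttribute("target", targetDataset)
        doc.appendChild(root)
        xmlStr = doc.toprettyxml(indent="    ")
        with open(xmlFileName, "w") as xmlFile:
            xmlFile.write(xmlStr)
        return xmlStr
    except:
        gzSupport.showTraceback()
        return ""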
def main(argv = None):
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    if len(datasets) > 0:
        progBar = len(datasets)
        arcpy.SetProgressor("step", "Running QA...", 0, progBar, 1)
        for dataset in datasets:
            arcpy.env.Workspace = gzSupport.workspace
            name = dataset.getAttributeNode("name").nodeValue
            gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
            table = os.path.join(gzSupport.workspace, name)
            fields = dataset.getElementsByTagName("Field")
            try:
                # run qa for dataset
                qaRulesDataset = dataset.getAttributeNode("qa").nodeValue
                gzSupport.addMessage("\nRunning QA (" + qaRulesDataset + ") for " + name)
                retVal = runDatasetChecks(dataset, table, qaRulesDataset)
                if retVal == False:
                    success = False
                for field in fields:
                    sourceQA = False
                    targetQA = False
                    fieldName = gzSupport.getNodeValue(field, "TargetName")
                    if sourceFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                        sourceQA = True
                        fieldName = gzSupport.getNodeValue(field, "SourceName")
                    if targetFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                        targetQA = True
                        fieldName = gzSupport.getNodeValue(field, "TargetName")
                    retVal = runFieldCheck(dataset, table, field, sourceQA, targetQA)
                    if retVal == False:
                        success = False
                    gzSupport.logDatasetProcess(name, fieldName, retVal)
                arcpy.SetProgressorPosition()
            except:
                gzSupport.showTraceback()
                gzSupport.addError("Field Check Error")
                success = False
                gzSupport.logDatasetProcess(name, "", False)
            finally:
                arcpy.ResetProgressor()
                arcpy.RefreshCatalog(table)
                arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")
    if gzSupport.ignoreErrors == True:
        success = True
    arcpy.SetParameter(SUCCESS, success)
    gzSupport.closeLog()
    return
def main(argv=None):
    global wildcard
    success = False
    if wildcard == "" or wildcard == "#":
        wildcard = ""
    files = getFiles(folder, wildcard)
    xmlStrSource = writeDocument(files, outputFileName)
    if xmlStrSource != "":
        success = True
    arcpy.SetParameter(gzSupport.successParameterNumber, success)
    arcpy.ResetProgressor()
    gzSupport.closeLog()
    return
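# getFiles is defined elsewhere in this script; a minimal sketch consistent with the
# call above, matching file names against the wildcard (all files when it is blank):
import fnmatch
import os

def getFiles(folder, wildcard):
    pattern = wildcard if wildcard != "" else "*"
    matches = []
    for root, dirs, fileNames in os.walk(folder):
        for fileName in fnmatch.filter(fileNames, pattern):
            matches.append(os.path.join(root, fileName))
    return matches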
def main(argv = None):
    # main function - list the source and target datasets, then append where there is a match on non-prefixed name
    success = True
    name = ''
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        sources = gzSupport.listDatasets(sourceGDB)
        sNames = sources[0]
        sFullNames = sources[1]
        targets = gzSupport.listDatasets(targetGDB)
        tNames = targets[0]
        tFullNames = targets[1]
        s = 0
        arcpy.SetProgressor("Step", "Appending rows...", 0, len(sFullNames), 1)
        for name in sNames:
            arcpy.SetProgressorPosition(s)
            arcpy.SetProgressorLabel(" Appending rows in " + name + "...")
            # for each source name
            if debug:
                gzSupport.addMessage(name)
            try:
                # look for the matching name in target names
                t = tNames.index(name)
            except:
                # will get here if no match
                t = -1
            if t > -1:
                # append if there is a match
                if len(datasetNames) == 0 or gzSupport.nameTrimmer(name) in datasetNames:
                    retVal = doAppend(sFullNames[s], tFullNames[t])
                    gzSupport.logDatasetProcess(name, "appendAlltoGDB", retVal)
                    if retVal == False:
                        success = False
                else:
                    gzSupport.addMessage("Skipping " + gzSupport.nameTrimmer(name))
            s = s + 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Unable to append datasets")
        success = False
        gzSupport.logDatasetProcess(name, "appendAlltoGDB", success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(targetGDB)
def main(argv = None):
    startTime = gzSupport.getDBTime()
    gzSupport.addMessage(startTime)
    success = True
    xmlStrSource = getDocument(sourceDataset)
    xmlStrTarget = getDocument(targetDataset)
    OpenBrowserURL(xmlStrSource, xmlStrTarget)
    arcpy.SetParameter(gzSupport.successParameterNumber, success)
    arcpy.ResetProgressor()
    #t = Thread(target=OpenBrowserURL(xmlStr))
    #t.start()
    #t.join()
    gzSupport.closeLog()
    return
def main(argv=None):
    # main function - list the source and target datasets, then append where there is a match on non-prefixed name
    success = True
    name = ''
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        sources = gzSupport.listDatasets(sourceGDB)
        sNames = sources[0]
        sFullNames = sources[1]
        targets = gzSupport.listDatasets(targetGDB)
        tNames = targets[0]
        tFullNames = targets[1]
        s = 0
        arcpy.SetProgressor("Step", "Appending rows...", 0, len(sFullNames), 1)
        for name in sNames:
            arcpy.SetProgressorPosition(s)
            arcpy.SetProgressorLabel(" Appending rows in " + name + "...")
            # for each source name
            if debug:
                gzSupport.addMessage(name)
            try:
                # look for the matching name in target names
                t = tNames.index(name)
            except:
                # will get here if no match
                t = -1
            if t > -1:
                # append if there is a match
                if len(datasetNames) == 0 or name.upper() in datasetNames:
                    retVal = doAppend(sFullNames[s], tFullNames[t])
                    gzSupport.logDatasetProcess(name, "appendAlltoGDB", retVal)
                    if retVal == False:
                        success = False
                else:
                    gzSupport.addMessage("Skipping " + name)
            s = s + 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Unable to append datasets")
        success = False
        gzSupport.logDatasetProcess(name, "appendAlltoGDB", success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(targetGDB)
def main(argv = None):
    # main function - list the source and target datasets, then append where there is a match on non-prefixed name
    success = True
    name = ''
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        if len(datasetNames) == 0:
            sources = gzSupport.listDatasets(sourceGDB)
            sNames = sources[0]
            sFullNames = sources[1]
            targets = gzSupport.listDatasets(targetGDB)
            tNames = targets[0]
            tFullNames = targets[1]
        else:
            sNames = datasetNames
        s = 0
        arcpy.SetProgressor("Step", "Appending rows...", 0, len(sNames), 1)
        for name in sNames:
            arcpy.SetProgressorPosition(s)
            arcpy.SetProgressorLabel(" Appending rows in " + name + "...")
            # for each source name
            if debug:
                gzSupport.addMessage(name)
            target = os.path.join(targetGDB, name)
            if arcpy.Exists(target):
                # append if there is a match
                if len(datasetNames) == 0 or gzSupport.nameTrimmer(name) in datasetNames:
                    retVal = doAppend(os.path.join(sourceGDB, name), target)
                    gzSupport.logDatasetProcess("appendAlltoGDB", name, retVal)
                    if retVal == False:
                        success = False
                else:
                    gzSupport.addMessage("Skipping " + gzSupport.nameTrimmer(name))
            s = s + 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Unable to append datasets")
        success = False
        gzSupport.logDatasetProcess("appendAlltoGDB", name, success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(targetGDB)
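# doAppend is defined elsewhere in these scripts; a minimal sketch consistent with the
# calls above (hypothetical; NO_TEST skips schema matching so the two geodatabases do
# not need identical field definitions):
def doAppend(source, target):
    try:
        arcpy.Append_management([source], target, "NO_TEST")
        return True
    except:
        gzSupport.showTraceback()
        return False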
def main(argv=None):
    success = True
    if not arcpy.Exists(gzSupport.workspace):
        gzSupport.addMessage(gzSupport.workspace + " does not exist, attempting to create")
        gzSupport.createGizintaGeodatabase()
    else:
        gzSupport.compressGDB(gzSupport.workspace)
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        gzSupport.addMessage("Looking for drawings modified since " + since)
        minTime = datetime.datetime.strptime(since, "%d/%m/%Y %I:%M:%S %p")
        cadFiles = gzSupport.getFileList(cadFolder, cadExt, minTime)
        if len(cadFiles) > 0:
            progBar = len(cadFiles) + 1
            arcpy.SetProgressor("step", "Importing Drawings...", 0, progBar, 1)
            arcpy.SetProgressorPosition()
            gzSupport.deleteExistingRows(datasets)
            for item in cadFiles:
                cadPath = item[0]
                cadName = item[1]
                gzSupport.addMessage("Importing Drawing " + cadName)
                for dataset in datasets:
                    try:
                        name = dataset.getAttributeNode("sourceName").nodeValue
                    except:
                        name = dataset.getAttributeNode("name").nodeValue
                    gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
                    xmlFields = gzSupport.getXmlElements(gzSupport.xmlFileName, "Field")
                    arcpy.SetProgressorLabel("Loading " + name + " for " + cadName + "...")
                    arcpy.env.Workspace = gzSupport.workspace
                    targetName = dataset.getAttributeNode("targetName").nodeValue
                    sourceWorkspace = os.path.join(cadPath, cadName)
                    exists = False
                    if not arcpy.Exists(os.path.join(gzSupport.workspace, targetName)):
                        gzSupport.addMessage(os.path.join(gzSupport.workspace, targetName) + " does not exist")
                    else:
                        exists = True
                        # arcpy.Delete_management(os.path.join(gzSupport.workspace,targetName))
                    try:
                        if not exists == True:
                            retVal = gzSupport.exportDataset(sourceWorkspace, name, targetName, dataset, xmlFields)
                            addDrawingField(os.path.join(gzSupport.workspace, targetName), cadName)
                        else:
                            retVal = importLayer(cadPath, cadName, dataset)
                            addDrawingField(os.path.join(gzSupport.workspace, targetName), cadName)
                        if retVal == False:
                            success = False
                    except:
                        gzSupport.showTraceback()
                        success = False
                        retVal = False
                    arcpy.env.Workspace = gzSupport.workspace
                    gzSupport.logDatasetProcess(cadName, name, retVal)
                    gzSupport.cleanupGarbage()
                arcpy.SetProgressorPosition()
    except:
        gzSupport.addError("A Fatal Error occurred")
        gzSupport.showTraceback()
        success = False
        gzSupport.logDatasetProcess("", "", False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
        gzSupport.cleanupGarbage()
        if success == False:
            gzSupport.addError("Errors occurred during process, look in log files for more information")
        if gzSupport.ignoreErrors == True:
            success = True
        gzSupport.closeLog()
        arcpy.SetParameter(SUCCESS, success)
def main(argv=None):
    # main function - list the source and target datasets, then delete rows/append where there is a match on non-prefixed name
    dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
    logname = os.path.join(outputFolder, 'gzCreateProjectFiles.log')
    gzSupport.startLog()
    success = True
    try:
        gzSupport.addMessage("Getting list of datasets for Target " + targetGDB)
        targets = gzSupport.listDatasets(targetGDB)
        tNames = targets[0]
        tFullNames = targets[1]
        gzSupport.addMessage("Getting list of datasets for Source " + sourceGDB)
        sources = gzSupport.listDatasets(sourceGDB)
        sNames = sources[0]
        sFullNames = sources[1]
        t = 0
        arcpy.SetProgressor("Step", "Creating Files...", 0, len(tNames), 1)
        for name in tNames:
            arcpy.SetProgressorPosition(t)
            arcpy.SetProgressorLabel("Creating file for " + name + "...")
            # for each target name
            if debug:
                gzSupport.addMessage(name)
            try:
                # look for the matching name in source names
                s = sNames.index(name)
            except:
                # will get here if no match
                s = -1
            if s > -1:
                # create file if there is a match
                fileName = outputFolder + os.sep + prefixStr + name.title() + ".xml"
                if os.path.exists(fileName):
                    os.remove(fileName)
                try:
                    #arcpy.AddToolbox(os.path.join(dir,"Gizinta.tbx"))
                    #arcpy.gzCreateProject_gizinta(sFullNames[s],tFullNames[t],fileName) # this doesn't always work...
                    gzCreateProject.createGzFile(sFullNames[s], tFullNames[t], fileName)
                    retVal = True
                    gzSupport.addMessage("Created " + fileName)
                except:
                    retVal = False
                if retVal == False:
                    gzSupport.addMessage("Failed to create file for " + name)
                    gzSupport.showTraceback()
                    success = False
            else:
                gzSupport.addMessage("Skipping " + name)
            t = t + 1
    except:
        gzSupport.showTraceback()
        arcpy.AddError("Error creating project files")
        success = False
    finally:
        arcpy.ResetProgressor()
        arcpy.SetParameter(gzSupport.successParameterNumber, success)
        arcpy.env.workspace = targetGDB
        arcpy.RefreshCatalog(outputFolder)
        gzSupport.closeLog()
def main(argv=None):
    success = True
    OpenBrowserURL(gzSupport.xmlFileName)
    gzSupport.closeLog()
    return
def main(argv=None):
    global sourceDataset, targetDataset, xmlFileName
    # os.path.realpath( __file__).replace('.py','.log')
    gzSupport.startLog()
    createGzFile(sourceDataset, targetDataset, xmlFileName)
    gzSupport.closeLog()
def main(argv=None):
    success = True
    if not arcpy.Exists(gzSupport.workspace):
        gzSupport.addMessage(gzSupport.workspace + " does not exist, attempting to create")
        gzSupport.createGizintaGeodatabase()
    else:
        gzSupport.compressGDB(gzSupport.workspace)
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        gzSupport.addMessage("Looking for drawings modified since " + since)
        minTime = dt.datetime.strptime(since, "%d/%m/%Y %I:%M:%S %p")
        cadFiles = getFileList(cadFolder, cadExt, minTime)
        if len(cadFiles) > 0:
            progBar = len(cadFiles) + 1
            arcpy.SetProgressor("step", "Importing Drawings...", 0, progBar, 1)
            deleteExistingRows(datasets)
            arcpy.SetProgressorPosition()
            for item in cadFiles:
                cadPath = item[0]
                cadName = item[1]
                gzSupport.addMessage("Importing Drawing " + cadName)
                for dataset in datasets:
                    try:
                        name = dataset.getAttributeNode("sourceName").nodeValue
                    except:
                        name = dataset.getAttributeNode("name").nodeValue
                    gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
                    arcpy.SetProgressorLabel("Loading " + name + " for " + cadName + "...")
                    arcpy.env.Workspace = gzSupport.workspace
                    targetName = dataset.getAttributeNode("targetName").nodeValue
                    sourceWorkspace = os.path.join(cadPath, cadName)
                    if not arcpy.Exists(os.path.join(gzSupport.workspace, targetName)):
                        gzSupport.addMessage(os.path.join(gzSupport.workspace, targetName) + " does not exist")
                        mode = "export"
                    else:
                        mode = "import"
                    try:
                        if mode == "import":
                            retVal = gzSupport.importDataset(sourceWorkspace, name, targetName, dataset)
                        elif mode == "export":
                            retVal = gzSupport.exportDataset(sourceWorkspace, name, targetName, dataset)
                        #retVal = importLayer(cadPath,cadName,dataset)
                        if retVal == False:
                            success = False
                    except:
                        gzSupport.showTraceback()
                        success = False
                        retVal = False
                    arcpy.env.Workspace = gzSupport.workspace
                    gzSupport.logDatasetProcess(cadName, name, retVal)
                    gzSupport.cleanupGarbage()
                arcpy.SetProgressorPosition()
    except:
        gzSupport.addError("A Fatal Error occurred")
        gzSupport.showTraceback()
        success = False
        gzSupport.logDatasetProcess("", "", False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
        gzSupport.cleanupGarbage()
        if success == False:
            gzSupport.addError("Errors occurred during process, look in log files for more information")
        if gzSupport.ignoreErrors == True:
            success = True
        gzSupport.closeLog()
        arcpy.SetParameter(SUCCESS, success)
def main(argv = None):
    global targetWorkspace
    hasVersion = False
    desc = arcpy.Describe(gzSupport.workspace)
    if desc.workspaceType != "RemoteDatabase" and versionName == None:
        targetWorkspace = defaultWorkspace
    success = True
    arcpy.ResetProgressor()
    arcpy.env.Workspace = gzSupport.workspace
    uniqueValues = gzSupport.getFieldValues("Unique", fieldNames, datasets)[0]
    sources = gzSupport.listDatasets(gzSupport.workspace)
    sNames = sources[0]
    sFullNames = sources[1]
    arcpy.SetProgressor("Step", "Load by " + str(fieldNames) + "...", 0, len(uniqueValues) * len(datasets), 1)
    for value in uniqueValues:
        try:
            hasVersion = False
            gzSupport.addMessage(value)
            if desc.workspaceType == "RemoteDatabase" and versionName != None:
                arcpy.SetProgressorLabel("Creating Version " + versionName)
                hasVersion = gzSupport.createVersion(defaultWorkspace, defaultVersionName, versionName)
            if hasVersion == True or versionName == None or desc.workspaceType == "LocalDatabase":
                arcpy.env.Workspace = targetWorkspace
                targets = gzSupport.listDatasets(targetWorkspace)
                tNames = targets[0]
                tFullNames = targets[1]
                for dataset in datasets:
                    name = dataset.getAttributeNode("name").nodeValue
                    arcpy.SetProgressorLabel("Loading Dataset " + name)
                    targetTable = gzSupport.getFullName(name, tNames, tFullNames)
                    sourceTable = gzSupport.getFullName(name, sNames, sFullNames)
                    attrs = [f.name for f in arcpy.ListFields(targetTable)]
                    expr = getExpression(attrs, fieldNames, value)
                    arcpy.SetProgressorLabel("Loading Dataset " + name + " Where " + expr)
                    tName = targetTable[targetTable.rfind("\\") + 1:]
                    tLocation = targetTable[0:targetTable.rfind("\\")]
                    if gzSupport.deleteRows(tLocation, tName, expr) == True:
                        retVal = gzSupport.appendRows(sourceTable, targetTable, expr)
                        if retVal == False:
                            success = False
                    else:
                        success = False
                    arcpy.SetProgressorPosition()
                if success == True:
                    if desc.workspaceType == "RemoteDatabase":
                        arcpy.SetProgressorLabel("Reconcile and Post")
                        retVal = gzSupport.reconcilePost(defaultWorkspace, versionName, defaultVersionName)
                        if retVal == False:
                            success = False
                            gzSupport.deleteVersion(defaultWorkspace, versionName)
                    elif desc.workspaceType == "LocalDatabase":
                        arcpy.SetProgressorLabel("Completed Update for " + str(value))
                    gzSupport.logDatasetProcess(targetTable, sys.argv[0], retVal)
                else:
                    gzSupport.logDatasetProcess(targetTable, sys.argv[0], retVal)
        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("Serious error", sys.argv[0], False)
        finally:
            arcpy.SetProgressorPosition()
            arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")
    if gzSupport.ignoreErrors == True:
        success = True
    if desc.workspaceType == "RemoteDatabase" and success == True:
        analyze(defaultWorkspace, datasets, tNames, tFullNames)
    arcpy.SetParameter(SUCCESS, success)
    arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    gzSupport.compressGDB(gzSupport.workspace)
    gzSupport.compressGDB(defaultWorkspace)
    gzSupport.closeLog()
    return
def main(argv=None):
    global targetWorkspace
    hasVersion = False
    desc = arcpy.Describe(gzSupport.workspace)
    if desc.workspaceType != "RemoteDatabase" and versionName == None:
        targetWorkspace = defaultWorkspace
    success = True
    arcpy.ResetProgressor()
    arcpy.env.Workspace = gzSupport.workspace
    uniqueValues = gzSupport.getFieldValues("Unique", fieldNames, datasets)[0]
    sources = gzSupport.listDatasets(gzSupport.workspace)
    sNames = sources[0]
    sFullNames = sources[1]
    arcpy.SetProgressor("Step", "Load by " + str(fieldNames) + "...", 0, len(uniqueValues) * len(datasets), 1)
    for value in uniqueValues:
        try:
            hasVersion = False
            gzSupport.addMessage(value)
            if desc.workspaceType == "RemoteDatabase" and versionName != None:
                arcpy.SetProgressorLabel("Creating Version " + versionName)
                hasVersion = gzSupport.createVersion(defaultWorkspace, defaultVersionName, versionName)
            if hasVersion == True or versionName == None or desc.workspaceType == "LocalDatabase":
                arcpy.env.Workspace = targetWorkspace
                targets = gzSupport.listDatasets(targetWorkspace)
                tNames = targets[0]
                tFullNames = targets[1]
                for dataset in datasets:
                    name = dataset.getAttributeNode("name").nodeValue
                    arcpy.SetProgressorLabel("Loading Dataset " + name)
                    targetTable = gzSupport.getFullName(name, tNames, tFullNames)
                    sourceTable = gzSupport.getFullName(name, sNames, sFullNames)
                    attrs = [f.name for f in arcpy.ListFields(targetTable)]
                    expr = getExpression(attrs, fieldNames, value)
                    arcpy.SetProgressorLabel("Loading Dataset " + name + " Where " + expr)
                    tName = targetTable[targetTable.rfind("\\") + 1:]
                    tLocation = targetTable[0:targetTable.rfind("\\")]
                    if gzSupport.deleteRows(tLocation, tName, expr) == True:
                        retVal = gzSupport.appendRows(sourceTable, targetTable, expr)
                        if retVal == False:
                            success = False
                    else:
                        success = False
                    arcpy.SetProgressorPosition()
                if success == True:
                    if desc.workspaceType == "RemoteDatabase":
                        arcpy.SetProgressorLabel("Reconcile and Post")
                        retVal = gzSupport.reconcilePost(defaultWorkspace, versionName, defaultVersionName)
                        if retVal == False:
                            success = False
                            gzSupport.deleteVersion(defaultWorkspace, versionName)
                    elif desc.workspaceType == "LocalDatabase":
                        arcpy.SetProgressorLabel("Completed Update for " + str(value))
                    gzSupport.logDatasetProcess(targetTable, sys.argv[0], retVal)
                else:
                    gzSupport.logDatasetProcess(targetTable, sys.argv[0], retVal)
            gzSupport.cleanupGarbage()
        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("Serious error", sys.argv[0], False)
        finally:
            arcpy.SetProgressorPosition()
            arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")
    if gzSupport.ignoreErrors == True:
        success = True
    if desc.workspaceType == "RemoteDatabase" and success == True:
        analyze(defaultWorkspace, datasets, tNames, tFullNames)
    arcpy.SetParameter(SUCCESS, success)
    arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    gzSupport.compressGDB(gzSupport.workspace)
    gzSupport.compressGDB(defaultWorkspace)
    gzSupport.closeLog()
    return
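# getExpression is defined elsewhere in this script; a minimal sketch of a where-clause
# builder consistent with the calls above. Pairing one value per entry in fieldNames is
# an assumption; attrs (the target's field names) could be used to validate each name:
def getExpression(attrs, fieldNames, value):
    values = value if isinstance(value, (list, tuple)) else [value]
    clauses = []
    for fieldName, val in zip(fieldNames, values):
        if isinstance(val, str):
            clauses.append(fieldName + " = '" + val + "'")
        else:
            clauses.append(fieldName + " = " + str(val))
    return " AND ".join(clauses)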