def sync(inputDrawing, playlists, GISStagingDefault_sde, GISProdDefault_sde, logfile):
    """Sync rows from the staging database to prod for one floor/drawing.

    Staging is expected to hold rows for the current drawing; this replaces
    the matching rows in production, using change detection when a
    ChangeDetection element is configured in the Gizinta XML playlist files.

    Args:
        inputDrawing: drawing path, or '*' to process all drawings.
        playlists: space-separated list of Gizinta playlist XML names.
        GISStagingDefault_sde: staging workspace (source) connection path.
        GISProdDefault_sde: production workspace (target) connection path.
        logfile: log file handle/path stored in the module-level `log`.

    Returns:
        True if all delete/append operations succeeded, False otherwise.
    """
    global log
    log = logfile
    plists = playlists.split(" ")
    arcpy.AddMessage(playlists)
    datasets = []
    for playlist in plists:
        datasets = datasets + gzSupport.getXmlElements(playlist, "Dataset")
    gzSupport.workspace = GISProdDefault_sde
    retVal = True
    if inputDrawing == '*':
        dwg = '*'
        drawingID = 'all'
    else:
        dwg = inputDrawing[inputDrawing.rfind(os.sep) + 1:]
        # NOTE(review): drawingID is not used below; getDrawingFromName may
        # have side effects — confirm before removing.
        drawingID = gseDrawing.getDrawingFromName(dwg)
    processed = []
    for dataset in datasets:
        name = dataset.getAttributeNode("name").nodeValue
        try:
            # Special case to handle multiple sources feeding one target.
            name = dataset.getAttributeNode("targetName").nodeValue
        except:
            pass
        if name not in processed:
            sourceDataset = os.path.join(GISStagingDefault_sde, name)
            targetDataset = os.path.join(GISProdDefault_sde, name)
            # FIX: original indexed [0] unconditionally, which raises
            # IndexError when no ChangeDetection element exists — the
            # "no change node" fallback branch below was unreachable.
            changeNodes = dataset.getElementsByTagName("ChangeDetection")
            changeNode = changeNodes[0] if changeNodes else None
            if changeNode is not None:
                # Guarded by `name not in processed`, so a plain append
                # replaces the original try/index/except/append dance.
                processed.append(name)
                # Change detection via database views in staging.
                arcpy.env.workspace = GISStagingDefault_sde
                desc = arcpy.Describe(os.path.join(GISProdDefault_sde, name))
                idField = changeNode.getAttributeNode("idField").nodeValue
                try:
                    viewIdField = changeNode.getAttributeNode("viewIdField").nodeValue
                    if debug == True:
                        msg("Using Change detection id field " + viewIdField)
                except:
                    viewIdField = "floorid"  # the default
                    if inputDrawing != '*':
                        if debug == True:
                            msg("Using default id field " + viewIdField)
                whereClause = buildViewWhereClause(viewIdField, inputDrawing)
                adds = getChanges(changeNode, "exceptProductionView", GISStagingDefault_sde, whereClause, idField)
                deletes = getChanges(changeNode, "exceptStagingView", GISStagingDefault_sde, whereClause, idField)
                if len(deletes) > 0:
                    deleteExpr = getDeltaWhereClause(desc, idField, deletes)
                    arcpy.env.workspace = GISProdDefault_sde
                    retcode = gzSupport.deleteRows(GISProdDefault_sde, name, deleteExpr)
                    if retcode == True:
                        msg(str(len(deletes)) + " Rows deleted in prod for " + name)
                    else:
                        msg("Failed to delete rows")
                        retVal = False
                if len(adds) > 0:
                    addExpr = getDeltaWhereClause(desc, idField, adds)
                    arcpy.env.workspace = GISProdDefault_sde
                    gzSupport.workspace = GISProdDefault_sde
                    retcode = gzSupport.appendRows(sourceDataset, targetDataset, addExpr)
                    if retcode == True:
                        msg(str(len(adds)) + " Rows appended in prod for " + name)
                    else:
                        msg("Failed to append rows for " + name)
                        retVal = False
                del adds
                del deletes
            else:
                # No change node: replace everything for the floor.
                if inputDrawing == '*':
                    idField = ''
                else:
                    idField = "FLOORID"
                whereClause = buildViewWhereClause(idField, inputDrawing)
                desc = arcpy.Describe(sourceDataset)
                view = "tempCount"
                gzSupport.workspace = GISStagingDefault_sde
                arcpy.env.workspace = GISStagingDefault_sde
                gzSupport.makeView(desc.DataElementType, GISStagingDefault_sde, name, view, whereClause, [])
                res = arcpy.GetCount_management(view)
                count = int(res.getOutput(0))
                if count > 0:
                    msg("Replacing rows for " + name + ", " + str(count) + " rows")
                    retcode = gzSupport.deleteRows(GISProdDefault_sde, name, whereClause)
                    retcode = gzSupport.appendRows(sourceDataset, targetDataset, whereClause)
                else:
                    msg("No rows in source database to update for " + name)
                # NOTE(review): this only unbinds the local name; the arcpy
                # view itself is not removed — confirm whether
                # arcpy.Delete_management(view) was intended.
                del view
    return retVal
def main(argv=None):
    """Load data per unique field value from source into target workspace.

    For each unique value of `fieldNames`, optionally creates a version
    (remote geodatabases), deletes matching rows in each target dataset and
    appends the corresponding source rows, then reconciles/posts and logs.
    Reports overall success via arcpy.SetParameter(SUCCESS, ...).
    Relies on module globals: targetWorkspace, defaultWorkspace, versionName,
    defaultVersionName, fieldNames, datasets, SUCCESS, gzSupport.
    """
    global targetWorkspace
    hasVersion = False
    desc = arcpy.Describe(gzSupport.workspace)
    if desc.workspaceType != "RemoteDatabase" and versionName is None:
        targetWorkspace = defaultWorkspace
    success = True
    arcpy.ResetProgressor()
    # FIX: original set arcpy.env.Workspace (wrong attribute case) —
    # the documented environment property is lowercase `workspace`.
    arcpy.env.workspace = gzSupport.workspace
    uniqueValues = gzSupport.getFieldValues("Unique", fieldNames, datasets)[0]
    sources = gzSupport.listDatasets(gzSupport.workspace)
    sNames = sources[0]
    sFullNames = sources[1]
    arcpy.SetProgressor("Step", "Load by " + str(fieldNames) + "...", 0, len(uniqueValues) * len(datasets), 1)
    for value in uniqueValues:
        try:
            hasVersion = False
            gzSupport.addMessage(value)
            if desc.workspaceType == "RemoteDatabase" and versionName is not None:
                arcpy.SetProgressorLabel("Creating Version " + versionName)
                hasVersion = gzSupport.createVersion(defaultWorkspace, defaultVersionName, versionName)
            if hasVersion == True or versionName is None or desc.workspaceType == "LocalDatabase":
                arcpy.env.workspace = targetWorkspace
                targets = gzSupport.listDatasets(targetWorkspace)
                tNames = targets[0]
                tFullNames = targets[1]
                for dataset in datasets:
                    name = dataset.getAttributeNode("name").nodeValue
                    arcpy.SetProgressorLabel("Loading Dataset " + name)
                    targetTable = gzSupport.getFullName(name, tNames, tFullNames)
                    sourceTable = gzSupport.getFullName(name, sNames, sFullNames)
                    attrs = [f.name for f in arcpy.ListFields(targetTable)]
                    expr = getExpression(attrs, fieldNames, value)
                    arcpy.SetProgressorLabel("Loading Dataset " + name + " Where " + expr)
                    tName = targetTable[targetTable.rfind("\\") + 1:]
                    tLocation = targetTable[0:targetTable.rfind("\\")]
                    if gzSupport.deleteRows(tLocation, tName, expr) == True:
                        retVal = gzSupport.appendRows(sourceTable, targetTable, expr)
                        if retVal == False:
                            # FIX: original had `success == False` — a no-op
                            # comparison, so append failures were dropped.
                            success = False
                    else:
                        # FIX: same no-op comparison bug on delete failure.
                        success = False
                    arcpy.SetProgressorPosition()
                if success == True:
                    if desc.workspaceType == "RemoteDatabase":
                        arcpy.SetProgressorLabel("Reconcile and Post")
                        retVal = gzSupport.reconcilePost(defaultWorkspace, versionName, defaultVersionName)
                        if retVal == False:
                            success = False
                            gzSupport.deleteVersion(defaultWorkspace, versionName)
                    elif desc.workspaceType == "LocalDatabase":
                        arcpy.SetProgressorLabel("Completed Update for " + str(value))
                        gzSupport.logDatasetProcess(targetTable, sys.argv[0], retVal)
                else:
                    gzSupport.logDatasetProcess(targetTable, sys.argv[0], retVal)
                gzSupport.cleanupGarbage()
        except Exception:
            # Top-level boundary: log the traceback and keep processing the
            # remaining values. (Narrowed from bare `except:` so SystemExit /
            # KeyboardInterrupt still propagate.)
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("Serious error", sys.argv[0], False)
        finally:
            arcpy.SetProgressorPosition()
            arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")
        if gzSupport.ignoreErrors == True:
            success = True
    if desc.workspaceType == "RemoteDatabase" and success == True:
        analyze(defaultWorkspace, datasets, tNames, tFullNames)
    arcpy.SetParameter(SUCCESS, success)
    arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    gzSupport.compressGDB(gzSupport.workspace)
    gzSupport.compressGDB(defaultWorkspace)
    gzSupport.closeLog()
    return
def main(argv = None):
    """Load data per unique field value from source into target workspace.

    NOTE(review): this redefines `main` from earlier in the file (this later
    definition wins at import time); the two differ only in minor details
    (e.g. the earlier one calls gzSupport.cleanupGarbage()) — confirm which
    is intended and remove the duplicate.

    For each unique value of `fieldNames`, optionally creates a version
    (remote geodatabases), deletes matching rows in each target dataset and
    appends the corresponding source rows, then reconciles/posts and logs.
    Reports overall success via arcpy.SetParameter(SUCCESS, ...).
    """
    global targetWorkspace
    hasVersion = False
    desc = arcpy.Describe(gzSupport.workspace)
    if desc.workspaceType != "RemoteDatabase" and versionName is None:
        targetWorkspace = defaultWorkspace
    success = True
    arcpy.ResetProgressor()
    # FIX: original set arcpy.env.Workspace (wrong attribute case) —
    # the documented environment property is lowercase `workspace`.
    arcpy.env.workspace = gzSupport.workspace
    uniqueValues = gzSupport.getFieldValues("Unique", fieldNames, datasets)[0]
    sources = gzSupport.listDatasets(gzSupport.workspace)
    sNames = sources[0]
    sFullNames = sources[1]
    arcpy.SetProgressor("Step", "Load by " + str(fieldNames) + "...", 0, len(uniqueValues) * len(datasets), 1)
    for value in uniqueValues:
        try:
            hasVersion = False
            gzSupport.addMessage(value)
            if desc.workspaceType == "RemoteDatabase" and versionName is not None:
                arcpy.SetProgressorLabel("Creating Version " + versionName)
                hasVersion = gzSupport.createVersion(defaultWorkspace, defaultVersionName, versionName)
            if hasVersion == True or versionName is None or desc.workspaceType == "LocalDatabase":
                arcpy.env.workspace = targetWorkspace
                targets = gzSupport.listDatasets(targetWorkspace)
                tNames = targets[0]
                tFullNames = targets[1]
                for dataset in datasets:
                    name = dataset.getAttributeNode("name").nodeValue
                    arcpy.SetProgressorLabel("Loading Dataset " + name)
                    targetTable = gzSupport.getFullName(name, tNames, tFullNames)
                    sourceTable = gzSupport.getFullName(name, sNames, sFullNames)
                    attrs = [f.name for f in arcpy.ListFields(targetTable)]
                    expr = getExpression(attrs, fieldNames, value)
                    arcpy.SetProgressorLabel("Loading Dataset " + name + " Where " + expr)
                    tName = targetTable[targetTable.rfind("\\") + 1:]
                    tLocation = targetTable[0:targetTable.rfind("\\")]
                    if gzSupport.deleteRows(tLocation, tName, expr) == True:
                        retVal = gzSupport.appendRows(sourceTable, targetTable, expr)
                        if retVal == False:
                            # FIX: original had `success == False` — a no-op
                            # comparison, so append failures were dropped.
                            success = False
                    else:
                        # FIX: same no-op comparison bug on delete failure.
                        success = False
                    arcpy.SetProgressorPosition()
                if success == True:
                    if desc.workspaceType == "RemoteDatabase":
                        arcpy.SetProgressorLabel("Reconcile and Post")
                        retVal = gzSupport.reconcilePost(defaultWorkspace, versionName, defaultVersionName)
                        if retVal == False:
                            success = False
                            gzSupport.deleteVersion(defaultWorkspace, versionName)
                    elif desc.workspaceType == "LocalDatabase":
                        arcpy.SetProgressorLabel("Completed Update for " + str(value))
                        gzSupport.logDatasetProcess(targetTable, sys.argv[0], retVal)
                else:
                    gzSupport.logDatasetProcess(targetTable, sys.argv[0], retVal)
        except Exception:
            # Top-level boundary: log the traceback and keep processing the
            # remaining values. (Narrowed from bare `except:` so SystemExit /
            # KeyboardInterrupt still propagate.)
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("Serious error", sys.argv[0], False)
        finally:
            arcpy.SetProgressorPosition()
            arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")
        if gzSupport.ignoreErrors == True:
            success = True
    if desc.workspaceType == "RemoteDatabase" and success == True:
        analyze(defaultWorkspace, datasets, tNames, tFullNames)
    arcpy.SetParameter(SUCCESS, success)
    arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    gzSupport.compressGDB(gzSupport.workspace)
    gzSupport.compressGDB(defaultWorkspace)
    gzSupport.closeLog()
    return