def _reconcileStaging(self, replica):
    """Reconcile and post the SQL Server edit version into the staging
    DEFAULT version, then compress the staging geodatabase.

    replica -- object exposing stagingWorkspace, stagingDefaultVersion and
               sqlserverEditVersion attributes (TODO confirm exact contract).
    Returns True on success, False on any failure (failures are logged).
    """
    func = 'SqlServerImporter.reconcile_staging'
    logging.info("Begin " + func)
    try:
        logging.debug("Reconciling data in " + replica.stagingWorkspace +
                      ", from " + replica.sqlserverEditVersion + " to " +
                      replica.stagingDefaultVersion)
        arcpy.ReconcileVersions_management(
            replica.stagingWorkspace, "ALL_VERSIONS",
            replica.stagingDefaultVersion, replica.sqlserverEditVersion,
            "NO_LOCK_ACQUIRED", "NO_ABORT", "BY_OBJECT",
            "FAVOR_TARGET_VERSION", "POST", "KEEP_VERSION")
        logging.debug("Finished reconciling data.")
        logging.debug("Compressing data in Staging SDE")
        arcpy.Compress_management(replica.stagingWorkspace)
        logging.debug("Finished compressing data in Staging SDE")
        return True
    except arcpy.ExecuteError:
        msgs = arcpy.GetMessages(2)
        arcpy.AddError(msgs)
        logging.error("ArcGIS error: %s", msgs)
    except Exception:
        # BUGFIX: was a bare "except:", which also traps SystemExit and
        # KeyboardInterrupt; narrow to Exception.
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
        msg = ("Error in " + func + ":\n" + tbinfo + "\nError Info:\n" +
               str(sys.exc_info()[1]))
        arcpy.AddError(msg)
        logging.error(msg)
    logging.info("End " + func)
    return False
def _reconcileStaging(self):
    """Reconcile and post the staging BG-BASE edit version into the
    staging dbo.DEFAULT version, then compress the staging geodatabase.

    Returns True on success, False on any failure (failures are logged).
    """
    func = 'WarehouseToSde.reconcile_staging'
    logging.info("Begin " + func)
    try:
        logging.debug(
            "Reconciling data from staging BG-BASE to staging DEFAULT")
        arcpy.ReconcileVersions_management(
            self._stagingWorkspace(), "ALL_VERSIONS", "dbo.DEFAULT",
            self._bgbaseEditVersion(), "NO_LOCK_ACQUIRED", "NO_ABORT",
            "BY_OBJECT", "FAVOR_TARGET_VERSION", "POST", "KEEP_VERSION")
        logging.debug(
            "Finished reconciling data from staging GIS to staging DEFAULT"
        )
        logging.debug("Compressing data in Staging SDE")
        arcpy.Compress_management(self._stagingWorkspace())
        logging.debug("Finished compressing data in Staging SDE")
        return True
    except arcpy.ExecuteError:
        msgs = arcpy.GetMessages(2)
        arcpy.AddError(msgs)
        logging.error("ArcGIS error: %s", msgs)
    except Exception:
        # BUGFIX: was a bare "except:", which also traps SystemExit and
        # KeyboardInterrupt; narrow to Exception.
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
        msg = ("Error in " + func + ":\n" + tbinfo + "\nError Info:\n" +
               str(sys.exc_info()[1]))
        arcpy.AddError(msg)
        logging.error(msg)
    logging.info("End " + func)
    return False
def _syncWithProd(self):
    """Synchronize replica changes from the staging geodatabase into
    production, then compress the production geodatabase.

    Errors are logged and swallowed; the method always returns None.
    """
    func = '_syncWithProd'
    logging.info("Begin " + func)
    try:
        logging.debug("Synchronizing data from production to staging")
        arcpy.SynchronizeChanges_management(self._stagingWorkspace(),
                                            self._replica(),
                                            self._productionWorkspace(),
                                            "FROM_GEODATABASE1_TO_2",
                                            "IN_FAVOR_OF_GDB1", "BY_OBJECT",
                                            "DO_NOT_RECONCILE")
        logging.debug(
            "Finished synchronizing data from production to staging")
        logging.debug("Compressing data in Production SDE")
        arcpy.Compress_management(self._productionWorkspace())
        logging.debug("Finished compressing data in Production SDE")
    except arcpy.ExecuteError:
        msgs = arcpy.GetMessages(2)
        arcpy.AddError(msgs)
        logging.error("ArcGIS error: %s", msgs)
    except Exception:
        # BUGFIX: was a bare "except:", which also traps SystemExit and
        # KeyboardInterrupt; narrow to Exception.
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
        msg = ("Error in " + func + ":\n" + tbinfo + "\nError Info:\n" +
               str(sys.exc_info()[1]))
        arcpy.AddError(msg)
        logging.error(msg)
    logging.info("End sync with staging")
    return
def _reconcileStaging(self):
    """Reconcile and post every staging edit version into dbo.DEFAULT,
    then compress the staging geodatabase.

    Errors are logged and swallowed; the method always returns None.
    """
    func = '_reconcileStaging'
    logging.info("Begin " + func)
    try:
        logging.debug('Getting edits versions')
        # BUGFIX: ''.split(',') returns [''], so the previous
        # len(versions) > 0 guard was always true and an empty version
        # name could be handed to ReconcileVersions. Filter blanks out.
        versions = [v for v in self._stagingEditVersions().split(',') if v]
        if versions:
            logging.debug('Found ' + str(len(versions)) +
                          ' edit versions to reconcile.')
            logging.debug("Reconciling data with Staging DEFAULT")
            arcpy.ReconcileVersions_management(
                self._stagingWorkspace(), "ALL_VERSIONS", "dbo.DEFAULT",
                ";".join(versions), "NO_LOCK_ACQUIRED", "NO_ABORT",
                "BY_OBJECT", "FAVOR_TARGET_VERSION", "POST", "KEEP_VERSION")
            logging.debug("Finished reconciling data with Staging DEFAULT")
            logging.debug("Compressing data in Staging SDE")
            arcpy.Compress_management(self._stagingWorkspace())
            logging.debug("Finished compressing data in Staging SDE")
    except arcpy.ExecuteError:
        msgs = arcpy.GetMessages(2)
        arcpy.AddError(msgs)
        logging.error("ArcGIS error: %s", msgs)
    except Exception:
        # BUGFIX: was a bare "except:", which also traps SystemExit and
        # KeyboardInterrupt; narrow to Exception.
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
        msg = ("Error in " + func + ":\n" + tbinfo + "\nError Info:\n" +
               str(sys.exc_info()[1]))
        arcpy.AddError(msg)
        logging.error(msg)
    logging.info("End " + func)
    return
def compress_rebuild_analyze():
    """Compress the SDE workspace, rebuild its system-table indexes and
    update statistics.

    On any failure the error and any arcpy messages are written to the
    module-level ``log`` file object, versions are recreated via
    ``recreate_versions()`` and connections reopened via ``clean_up()``.
    """
    def _stamp():
        # Every log line carries the same "HH:MM:SS mm/dd/YYYY " prefix;
        # factored out of six call sites.
        return datetime.datetime.now().strftime("%H:%M:%S %m/%d/%Y ")

    try:
        log.write(_stamp() + "Compressing database...\n")
        arcpy.Compress_management(WORKSPACE_SDE)
        log.write(_stamp() + "Rebuilding indexes...\n")
        arcpy.RebuildIndexes_management(WORKSPACE_SDE, "SYSTEM")
        log.write(_stamp() + "Analyzing datasets...\n")
        arcpy.AnalyzeDatasets_management(WORKSPACE_SDE, "SYSTEM")
    except Exception:
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
        pymsg = (_stamp() +
                 "An error has occured.\nTraceback info:\n" + tbinfo +
                 "Error Info:\n" + " " + str(sys.exc_info()[1]))
        if arcpy.GetMessages(2) != "":
            msgs = ("\n\n-ARCPY ERRORS-\n" + arcpy.GetMessages(2) +
                    "\n\nScript finished with errors.")
        else:
            msgs = "\n\nScript finished with errors."
        log.write(pymsg)
        log.write(msgs)
        log.write(_stamp() + "Script failed. Recreating versions...\n")
        recreate_versions()
        log.write(_stamp() + "Reaccepting connections...\n")
        clean_up()
def ship(self):
    """Compress the UGGP geodatabase as the SDE user, analyze the system
    tables, then analyze all (non-topology) datasets reachable from each
    admin user connection.

    Any unhandled failure is emailed with the traceback and re-raised;
    a compress failure alone is logged and the run continues.
    """
    try:
        # Run commands as user SDE to compress and analyze database and system tables
        sdeconnection = join(self.garage, 'UGGP', 'sde@[email protected]')
        self.log.info('arcpy.env.workspace: %s', arcpy.env.workspace)
        self.log.info('connection: %s', sdeconnection)
        description = arcpy.Describe(sdeconnection)
        self.log.info('workspace full props: %s',
                      description.fullPropsRetrieved)
        self.log.info(
            'workspace valid: %s', 'SdeWorkspace' in getattr(
                description, 'workspaceFactoryProgID', ''))
        self.log.info('connection exists: %s', arcpy.Exists(sdeconnection))
        arcpy.env.workspace = sdeconnection
        try:
            arcpy.Compress_management(sdeconnection)
            self.log.info('Compress Complete')
        except Exception as ex:
            # BUGFIX: Exception.message was removed in Python 3; str(ex)
            # is portable and equivalent for logging purposes.
            self.log.error('compress exception: %s', str(ex), exc_info=True)
        arcpy.AnalyzeDatasets_management(sdeconnection, 'SYSTEM')
        self.log.info('Analyze System Tables Complete')
        userconnections = [
            join(self.garage, 'UGGP', 'uggpadmin@[email protected]')
        ]
        for con in userconnections:
            # The user in this workspace must be the owner of the data to
            # analyze.
            workspace = con
            arcpy.env.workspace = workspace
            # NOTE: Analyze Datasets can accept a Python list of datasets.
            # First, get all the stand alone tables, feature classes and
            # rasters the user has access to.
            dataList = arcpy.ListTables() + arcpy.ListFeatureClasses() \
                + arcpy.ListRasters()
            # Next, add everything inside each feature dataset.
            for dataset in arcpy.ListDatasets('', 'Feature'):
                arcpy.env.workspace = join(workspace, dataset)
                dataList += arcpy.ListFeatureClasses() + arcpy.ListDatasets()
            #: filter out topology items
            dataList = [
                table for table in dataList
                if 'topology' not in table.lower()
            ]
            # reset the workspace
            arcpy.env.workspace = workspace
            # Note: to use the 'SYSTEM' option the workspace user must be
            # an administrator.
            if len(dataList) > 0:
                arcpy.AnalyzeDatasets_management(workspace, 'NO_SYSTEM',
                                                 dataList, 'ANALYZE_BASE',
                                                 'ANALYZE_DELTA',
                                                 'ANALYZE_ARCHIVE')
                self.log.info('Analyze Complete')
    except Exception:
        self.send_email('*****@*****.**',
                        'Error with {}'.format(__file__), format_exc())
        raise
def DatabaseCompression(ConnectionNames):
    '''
    Compress each geodatabase connection in ConnectionNames.

    Compressing removes unused states from the state tree, which improves
    query performance. ConnectionNames is an iterable of connection
    files/paths accepted by arcpy.Compress_management.
    '''
    for Connection in ConnectionNames:
        # Compress one database at a time and report progress (Python 2
        # print statement preserved as-is).
        arcpy.Compress_management(Connection)
        print Connection + " has been compressed"
def ship(self):
    """Compress the UEMP geodatabase as the SDE user, analyze the system
    tables, then analyze every dataset reachable from the admin user
    connection; any failure is emailed with the traceback and re-raised.
    """
    try:
        #: Run commands as user SDE to compress and analyze database and system tables
        sdeconnection = join(self.garage, 'UEMP', 'sde@[email protected]')
        arcpy.Compress_management(sdeconnection)
        self.log.info('Compress Complete')
        arcpy.AnalyzeDatasets_management(sdeconnection, 'SYSTEM')
        self.log.info('Analyze System Tables Complete')
        userconnections = [
            join(self.garage, 'UEMP', 'uempadmin@[email protected]')
        ]
        for con in userconnections:
            # set workspace
            # the user in this workspace must be the owner of the data to analyze.
            workspace = con
            # set the workspace environment
            arcpy.env.workspace = workspace
            # NOTE: Analyze Datasets can accept a Python list of datasets.
            # Get a list of all the datasets the user has access to.
            # First, get all the stand alone tables, feature classes and rasters.
            dataList = arcpy.ListTables() + arcpy.ListFeatureClasses(
            ) + arcpy.ListRasters()
            # Next, for feature datasets get all of the datasets and featureclasses
            # from the list and add them to the master list.
            for dataset in arcpy.ListDatasets('', 'Feature'):
                arcpy.env.workspace = join(workspace, dataset)
                dataList += arcpy.ListFeatureClasses(
                ) + arcpy.ListDatasets()
            # reset the workspace
            arcpy.env.workspace = workspace
            # Execute analyze datasets
            # Note: to use the 'SYSTEM' option the workspace user must be an administrator.
            if len(dataList) > 0:
                arcpy.AnalyzeDatasets_management(workspace, 'NO_SYSTEM',
                                                 dataList, 'ANALYZE_BASE',
                                                 'ANALYZE_DELTA',
                                                 'ANALYZE_ARCHIVE')
                self.log.info('Analyze Complete')
    except Exception:
        self.send_email('*****@*****.**',
                        'Error with {}'.format(__file__), format_exc())
        raise
def dbCompress(inputWS):
    """Compress an enterprise geodatabase workspace.

    Returns True when the workspace exists and the compress succeeds,
    False otherwise; both outcomes are logged.
    """
    # BUGFIX: the original evaluated Compress_management inside an eagerly
    # built all([...]) list, so the compress ran even when the workspace
    # did not exist, and Exists() was checked twice for no benefit.
    if not arcpy.Exists(inputWS):
        logging.error(f"!!!!!!!! ERROR WITH WORKSPACE {inputWS} !!!!!!!!")
        return False
    try:
        arcpy.Compress_management(inputWS)
    except arcpy.ExecuteError:
        logging.error(f"!!!!!!!! ERROR WITH WORKSPACE {inputWS} !!!!!!!!")
        return False
    logging.info(f"Workspace {inputWS} clear to continue.")
    return True
def compress(self):
    """Compress the geodatabase and report how many states were removed.

    Returns 0 when the connected user is not an administrator or when the
    compress did not succeed; otherwise the value returned by the
    get_compress_states.sql query.
    """
    states_removed = 0
    if self.isadministrator():
        # self.interpret presumably maps the geoprocessing result to a
        # status code where 0 means success — TODO confirm.
        if self.interpret(arcpy.Compress_management(self.sdeconn)) == 0:
            # Query the database for the number of states the compress
            # removed (SQL text loaded from get_compress_states.sql).
            states_removed = cx_sde.selectavalue(
                self.sdeconn,
                self.fetchsql('{0}'.format('get_compress_states.sql')))
    return states_removed
def compress(self):
    """Run Compress on the SDE connection for the current maintenance item
    and record the outcome in the maintenance log; exceptions are routed
    to self.handleException.
    """
    self.current_item += 1
    self.itemPrint()  # ↓
    try:
        arcpy.Compress_management(self.sde_connection)
        self.itemPrint()  # ↑
        # maintenance_status[1] presumably is the "success" label —
        # TODO confirm against the class definition.
        maintenance_log.appendToFile([
            self.maintenance_item[self.current_item],
            self.maintenance_status[1]
        ])
    except Exception as e:
        self.handleException(e)
def compress_prod():
    """Compress the production SDE DEFAULT version.

    Errors (arcpy or Python) are reported and logged but swallowed so the
    caller always continues; always returns None.
    """
    logging.info("Begin compress prod")
    try:
        logging.debug("Compressing data in Production SDE Default")
        arcpy.Compress_management(production_workspace)
        logging.debug("Finished compressing data in Production SDE Default")
    except arcpy.ExecuteError:
        msgs = arcpy.GetMessages(2)
        arcpy.AddError(msgs)
        logging.error("ArcGIS error: %s", msgs)
    except Exception as e:
        # BUGFIX: Exception.message was removed in Python 3; str(e) is the
        # portable equivalent.
        msg = str(e)
        arcpy.AddError(msg)
        logging.error("Python error: %s", msg)
    logging.info("End compress prod")
    return
def compress_staging():
    """Compress the staging SDE BG-BASE workspace.

    Returns True on success; on failure the error is reported and logged
    and False is returned.
    """
    logging.info("Begin compress staging")
    try:
        logging.debug("Compressing data in Staging SDE BG-BASE")
        arcpy.Compress_management(staging_edit_workspace)
        logging.debug("Finished compressing data in Staging SDE BG-BASE")
        return True
    except arcpy.ExecuteError:
        msgs = arcpy.GetMessages(2)
        arcpy.AddError(msgs)
        logging.error("ArcGIS error: %s", msgs)
    except Exception as e:
        # BUGFIX: Exception.message was removed in Python 3; str(e) is the
        # portable equivalent.
        msg = str(e)
        arcpy.AddError(msg)
        logging.error("Python error: %s", msg)
    logging.info("End compress staging")
    return False  # stray trailing semicolon removed
def main():
    """Reconcile/post SCLFINAL.Final into SCLADMIN.Admin, then
    SCLADMIN.Admin into sde.DEFAULT, then compress the database.

    Returns a tuple (xStep, xFlag): xStep names the last step attempted
    (with the error message appended on failure) and xFlag is 0 on
    success, 1 on failure. Python 2 script (print statements).
    """
    try:
        import time
        print "Started at " + time.strftime("%Y/%m/%d %H.%M.%S", time.localtime())
        xFlag = 0
        import arcpy, os, sys, shutil
        # Get parameters from config file
        sys.path.append(
            "//ccgisfiles01m/gisdata/prdba/crupdates/CCPythonLib/Appl/")
        import getConfig
        # Admin-level connection to the crscl database.
        dbPrConn = getConfig.main('user', 'crscl', 'path', 'sclAdmin')
        # Reconcile and Post Final to Admin
        xStep = 'reconcile and post final to admin'
        arcpy.ReconcileVersion_management(dbPrConn, "SCLFINAL.Final",
                                          "SCLADMIN.Admin", "BY_OBJECT",
                                          "FAVOR_TARGET_VERSION",
                                          "LOCK_ACQUIRED", "ABORT_CONFLICTS",
                                          "POST")
        # Reconcile and Post Admin to Default
        xStep = 'reconcile and post admin to default'
        arcpy.ReconcileVersion_management(dbPrConn, "SCLADMIN.Admin",
                                          "sde.DEFAULT", "BY_OBJECT",
                                          "FAVOR_TARGET_VERSION",
                                          "LOCK_ACQUIRED", "ABORT_CONFLICTS",
                                          "POST")
        # Compress database
        xStep = 'compress database'
        arcpy.Compress_management(dbPrConn)
        print "\nCompleted at " + time.strftime("%Y/%m/%d %H.%M.%S", time.localtime())
    except:
        # NOTE(review): bare except also catches SystemExit and
        # KeyboardInterrupt; left unchanged because this function reports
        # failure via the return tuple rather than re-raising.
        xFlag = 1
        ex = sys.exc_info()[1]
        eMsg = ex.args[0]
        xStep = '{0}: {1}'.format(xStep, eMsg)
        print 'Error at the following process: {0}'.format(xStep)
    finally:
        # NOTE(review): return inside finally swallows any in-flight
        # exception; behavior preserved.
        return (xStep, xFlag)
def compressGDB(workspace):
    """Compress a remote (SDE) geodatabase or compact a local one.

    Returns True when the operation succeeded, False otherwise; failures
    are reported via addMessageLocal and the caller is expected to
    continue.
    """
    retVal = False
    desc = arcpy.Describe(workspace)
    if desc.workspaceType == "RemoteDatabase":
        try:
            addMessageLocal("Database Compress...")
            arcpy.Compress_management(workspace)
            retVal = True
        except Exception:
            # BUGFIX: was a bare "except:", which also traps SystemExit
            # and KeyboardInterrupt; narrow to Exception.
            addMessageLocal("Database Compress failed, continuing")
    elif desc.workspaceType == "LocalDatabase":
        try:
            addMessageLocal("Database Compact...")
            arcpy.Compact_management(workspace)
            retVal = True
        except Exception:
            addMessageLocal("Local Database Compact failed, continuing")
    return retVal
def compress(self):
    """Compress every geodatabase file found under self.dbfolder,
    appending each attempt's outcome (with a timestamp) to the pass or
    fail log file.
    """
    for root, dirs, files in os.walk(self.dbfolder):
        for fileName in files:
            fullPath = os.path.join(root, fileName)
            try:
                snapshot = datetime.today()
                print("Compressing " + fileName)
                arcpy.Compress_management(fullPath)
                # BUGFIX: use a context manager so the log handle is
                # closed even if the write itself raises.
                with open(self.compresslogpass, 'a') as txtPass:
                    txtPass.write("\n" + fileName + " succeeded at: " +
                                  str(snapshot))
            except Exception:
                # BUGFIX: was a bare "except:"; narrowed to Exception so
                # SystemExit/KeyboardInterrupt still propagate.
                snapshot = datetime.today()
                print("FAILED: " + fileName)
                with open(self.compresslogfail, 'a') as txtFail:
                    txtFail.write("\n" + fileName + " failed at: " +
                                  str(snapshot))
def RebuildGisIndexes(Connections):
    '''
    Compress each geodatabase connection, then rebuild indexes and update
    statistics for the ESRI-registered ('DBO'-schema) tables in it.

    Connections is a list of geodatabase connection files/paths.
    Python 2 script (print statements).
    '''
    for Connection in Connections:
        env.workspace = Connection
        # Compress first so statistics reflect the trimmed state tree.
        arcpy.Compress_management(Connection)
        print Connection + " was compressed"
        DataList = arcpy.ListTables() + arcpy.ListFeatureClasses()
        GisDataList = []
        for item in DataList:
            # Exclude non-ESRI tables from having indexes updated
            if 'DBO' in item:
                GisDataList.append(item)
        arcpy.RebuildIndexes_management(Connection, "SYSTEM", GisDataList,
                                        "ALL")
        arcpy.AnalyzeDatasets_management(Connection, "SYSTEM", GisDataList,
                                         "ANALYZE_BASE", "ANALYZE_DELTA",
                                         "ANALYZE_ARCHIVE")
        print "Indexes and statistics for GIS tables in " + Connection + " have been rebuilt"
    print "Script ran successfully"
logFile.write('{0}\n\n'.format(arcpy.GetMessages())) except: logFile.write("Reconcile Failed.\n\n") messageText = arcpy.GetMessages() logFile.write('%s\n\n'%messageText) try: arcpy.AnalyzeDatasets_management(workspace, "SYSTEM", dataList, "ANALYZE_BASE", "ANALYZE_DELTA", "ANALYZE_ARCHIVE") logFile.write('{0}\n\n'.format(arcpy.GetMessages())) except: logFile.write("Pre compress statistics failed.\n\n") messageText = arcpy.GetMessages() logFile.write('%s\n\n'%messageText) try: #compress sde database arcpy.Compress_management(dbConnection) logFile.write('{0}\n\n'.format(arcpy.GetMessages())) except: logFile.write("Compress Failed.\n\n") messageText = arcpy.GetMessages() logFile.write('%s\n\n'%messageText) try: # Rebuild indexes and analyze the states and states_lineages system tables arcpy.RebuildIndexes_management(workspace, "SYSTEM", dataList, "ALL") logFile.write('{0}\n\n'.format(arcpy.GetMessages())) arcpy.AnalyzeDatasets_management(workspace, "SYSTEM", dataList, "ANALYZE_BASE", "ANALYZE_DELTA", "ANALYZE_ARCHIVE") logFile.write('{0}\n\n'.format(arcpy.GetMessages()))
def mainFunction(geodatabase,disconnectUsers): # Get parameters from ArcGIS Desktop tool by seperating by comma e.g. (var1 is 1st parameter,var2 is 2nd parameter,var3 is 3rd parameter)
    """Run maintenance on an enterprise geodatabase: optionally block and
    disconnect users, compress, rebuild indexes and update statistics on
    the datasets owned by the connected user, then reopen connections.

    geodatabase     -- geodatabase connection file/path.
    disconnectUsers -- the string "true" to disconnect users for the run.
    Relies on module-level names: enableLogging, logger, logMessage,
    output, sendErrorEmail, sendEmail.
    """
    try:
        # --------------------------------------- Start of code --------------------------------------- #
        # If disconnecting users
        if (disconnectUsers == "true"):
            # Block any new connections to the geodatabase
            arcpy.AcceptConnections(geodatabase, False)
            arcpy.AddMessage("Disconnecting all users from " + geodatabase + "...")
            # Logging
            if (enableLogging == "true"):
                logger.info("Disconnecting all users from " + geodatabase + "...")
            arcpy.DisconnectUser(geodatabase, "ALL")
        # Compress the geodatabase
        arcpy.AddMessage("Compressing geodatabase - " + geodatabase + "...")
        # Logging
        if (enableLogging == "true"):
            logger.info("Compressing geodatabase - " + geodatabase + "...")
        arcpy.env.workspace = geodatabase
        arcpy.Compress_management(geodatabase)
        # Load in datasets to a list
        dataList = arcpy.ListTables() + arcpy.ListFeatureClasses() + arcpy.ListDatasets()
        # Load in datasets from feature datasets to the list
        for dataset in arcpy.ListDatasets("", "Feature"):
            arcpy.env.workspace = os.path.join(geodatabase,dataset)
            dataList += arcpy.ListFeatureClasses() + arcpy.ListDatasets()
        # Reset the workspace
        arcpy.env.workspace = geodatabase
        # Get the user name for the workspace
        userName = arcpy.Describe(geodatabase).connectionProperties.user.lower()
        # Remove any datasets that are not owned by the connected user.
        userDataList = [ds for ds in dataList if ds.lower().find(".%s." % userName) > -1]
        # Execute rebuild indexes
        arcpy.AddMessage("Rebuilding the indexes for all tables in the database....")
        # Logging
        if (enableLogging == "true"):
            logger.info("Rebuilding the indexes for all tables in the database....")
        # Note: to use the "SYSTEM" option the workspace user must be an administrator.
        arcpy.RebuildIndexes_management(geodatabase, "SYSTEM", userDataList, "ALL")
        # Execute analyze datasets
        arcpy.AddMessage("Analyzing and updating the database statistics....")
        # Logging
        if (enableLogging == "true"):
            logger.info("Analyzing and updating the database statistics....")
        # Note: to use the "SYSTEM" option the workspace user must be an administrator.
        arcpy.AnalyzeDatasets_management(geodatabase, "SYSTEM", userDataList, "ANALYZE_BASE","ANALYZE_DELTA","ANALYZE_ARCHIVE")
        # If disconnecting users
        if (disconnectUsers == "true"):
            # Allow any new connections to the geodatabase
            arcpy.AddMessage("Allowing all users to connect to " + geodatabase + "...")
            # Logging
            if (enableLogging == "true"):
                logger.info("Allowing all users to connect to " + geodatabase + "...")
            arcpy.AcceptConnections(geodatabase, True)
        # --------------------------------------- End of code --------------------------------------- #
        # If called from gp tool return the arcpy parameter
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                arcpy.SetParameterAsText(1, output)
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
    # If arcpy error
    except arcpy.ExecuteError:
        # Build and show the error message
        errorMessage = arcpy.GetMessages(2)
        arcpy.AddError(errorMessage)
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
    # If python error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message
        for i in range(len(e.args)):
            if (i == 0):
                # Python version check
                if sys.version_info[0] >= 3:
                    # Python 3.x
                    errorMessage = str(e.args[i]).encode('utf-8').decode('utf-8')
                else:
                    # Python 2.x
                    errorMessage = unicode(e.args[i]).encode('utf-8')
            else:
                # Python version check
                if sys.version_info[0] >= 3:
                    # Python 3.x
                    errorMessage = errorMessage + " " + str(e.args[i]).encode('utf-8').decode('utf-8')
                else:
                    # Python 2.x
                    errorMessage = errorMessage + " " + unicode(e.args[i]).encode('utf-8')
        arcpy.AddError(errorMessage)
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
"sde.DEFAULT", versionList, "LOCK_ACQUIRED", "NO_ABORT", "BY_OBJECT", "FAVOR_TARGET_VERSION", "POST", "KEEP_VERSION", sys.path[0] + "/reclog.txt") LogMessage('Reconcile and post executed successfully.') LogMessage('Reconcile Log is below.') #warning this can be very long. LogMessage(open(sys.path[0] + "/reclog.txt", 'r').read()) except: LogMessage('Reconcile & post failed. Error message below.' + arcpy.GetMessages()) # Run the compress tool. try: LogMessage("Running compress") arcpy.Compress_management(adminConn) #if the compress is successful add a message. LogMessage('Compress was successful.') except: #If the compress failed, add a message. LogMessage('\nCompress failed: error message below.' + arcpy.GetMessages()) #Update statistics and idexes for the system tables # Note: to use the "SYSTEM" option the user must be an geodatabase or database administrator. try: LogMessage("Rebuilding indexes on the system tables") arcpy.RebuildIndexes_management(adminConn, "SYSTEM") LogMessage('Rebuilding of system table indexes successful.') except: LogMessage( 'Rebuild indexes on system tables fail: error message below.\n\r' +
versionList, "LOCK_ACQUIRED", "NO_ABORT", "BY_OBJECT", "FAVOR_TARGET_VERSION", "POST", "DELETE_VERSION", "") print "Running compress" con = pyodbc.connect( "DRIVER={SQL Server};Server=GIS3;DATABASE=water_test;Trusted_Connection = Yes" ) cur = con.cursor() row_count = len( cur.execute("select * from sde.sde_state_lineages").fetchall()) #print cur.execute("select * from sde.sde_state_lineages").fetchall() print "SDE is at State " + str(row_count - 1) while row_count <> 1: #compress the database until State 0 is achieved (i.e. one record exists in the lineage table) arcpy.Compress_management(sde) row_count = len( cur.execute("select * from sde.sde_state_lineages").fetchall()) #print cur.execute("select * from sde.sde_state_lineages").fetchall() print "SDE is at State " + str(row_count - 1) cur.close con.close print "Compress successfully completed!" print "Allow users to connect to " + str(sde) arcpy.AcceptConnections( sde, True) #allow the database to begin accepting connections again print "Rebuilding indexes on the system tables" arcpy.RebuildIndexes_management( sde, "SYSTEM") #rebuild indexes on the system tables print "Updating statistics on the system tables"
# Reconcile edits arcpy.ReconcileVersions_management( SDE, reconcile_mode="ALL_VERSIONS", target_version="dbo.DEFAULT", edit_versions=version_list, acquire_locks="LOCK_AQUIRED", abort_if_conflicts="NO_ABORT", conflict_definition="BY_OBJECT", conflict_resolution="FAVOR_TARGET_VERSION", with_post="POST", with_delete="KEEP_VERSION", out_log=os.path.join(file_loc, 'reconcile_log')) # Compress database and reopen to outside connections logging.info('Compressing database...') arcpy.Compress_management(SDE) arcpy.AcceptConnections(SDE, True) logging.debug('Open to connections') except: print("There was an error!") logging.error( "An error has occured while reconciling and compressing versions", exc_info=True) arcpy.AcceptConnections(SDE, True) # Rebuild indexes and update statistics try: logging.info("Rebuilding indexes...") arcpy.RebuildIndexes_management(SDE, "SYSTEM") logging.info("Updating statistics") arcpy.AnalyzeDatasets_management(SDE, "SYSTEM")
arcpy.AcceptConnections(workspace, False) # Disconnect Users arcpy.DisconnectUser(workspace, "ALL") # Disconnect all users from the database. arcpy.DisconnectUser('Database Connections/[email protected]_EGDB_20170105A.sde', "ALL") # Get a list of versions to pass into the ReconcileVersions tool. versionList = arcpy.ListVersions('Database Connections/[email protected]_EGDB_20170105A.sde') # Execute the ReconcileVersions tool. arcpy.ReconcileVersions_management('Database Connections/[email protected]_EGDB_20170105A.sde', "ALL_VERSIONS", "sde.DEFAULT", versionList, "LOCK_ACQUIRED", "ABORT_CONFLICTS", "BY_OBJECT", "FAVOR_TARGET_VERSION", "POST", "KEEP_VERSION", r"C:/Users/daniel.scott/Desktop/log.txt") # Compress database arcpy.Compress_management(workspace) print 'Compression Complete' # Get a list of stand alone feature classes dataList = arcpy.ListFeatureClasses() # Add Feature classes in datasets for dataset in arcpy.ListDatasets("", "Feature"): arcpy.env.workspace = os.path.join(workspace, dataset) dataList += arcpy.ListFeatureClasses() # reset the workspace arcpy.env.workspace = workspace # Execute rebuild indexes arcpy.RebuildIndexes_management(workspace, "SYSTEM", dataList, "ALL")
# Admin directory LogFile = open( r"W:\daniel.scott\arcgis\egdb\Phase1\EGDBmaintenance_" + datetime120 + ".csv", "w") ## Begin Script ownerDB = os.listdir(OwnerFileDir) for DB in ownerDB: try: arcpy.env.workspace = OwnerFileDir arcpy.AddMessage("Not Disconnecting users from " + DB + "\n") LogFile.write("Not Disconnecting users from," + DB + "\n") #arcpy.AcceptConnections(DB, False) arcpy.DisconnectUser(DB, "ALL") arcpy.AddMessage("Compressing " + DB + "\n") LogFile.write("Compressing," + DB + "\n") arcpy.Compress_management(DB) arcpy.AddMessage("Allowing connections to " + DB + "\n") LogFile.write("Allowing connections to," + DB + "\n") arcpy.AcceptConnections(DB, True) except arcpy.ExecuteError: msgs = arcpy.GetMessages(2) arcpy.AddError(msgs) arcpy.AddMessage(msgs) LogFile.write(msgs) ErrorCount = ErrorCount + 1 except: if (arcpy.DisconnectUser == "true"): arcpy.AcceptConnections(DB, True) finally: pass # Rebuild indexes and analyze the states and states_lineages system tables for each EGDB connection file found in the admin directory
def clean_up(self):
    """Release the workspace: clear the ArcGIS workspace cache, then
    compress the geodatabase."""
    arcpy.ClearWorkspaceCache_management(self.workspace)
    arcpy.Compress_management(self.workspace)
output = open(LogFile, "w") output.write("GISCompress\n") output.write(strmsg1) for fileLine in workspacelist: ENV.workspace = fileLine strmsg1 = "Compress Workspace: " + fileLine + "\n" print strmsg1 output.write(strmsg1) try: versionList = arcpy.ListVersions(fileLine) for version in versionList: if version.find("{")>-1: print "Delete Version: " + version arcpy.DeleteVersion_management(fileLine, version) print arcpy.GetMessages().encode("gb2312") + "\n" output.write(arcpy.GetMessages().encode("gb2312")+ "\n") arcpy.Compress_management(fileLine) print arcpy.GetMessages().encode("gb2312") + "\n" output.write(arcpy.GetMessages().encode("gb2312")+ "\n") except: print arcpy.GetMessages().encode("gb2312") + "\n" output.write(arcpy.GetMessages().encode("gb2312")+ "\n") Date = time.strftime("%m-%d-%Y", time.localtime())# Set the date. Time = time.strftime("%I:%M:%S %p", time.localtime()) # Set the time. output.write(str("Process completed at " + str(Date) + " " + str(Time) + "." + "\n")) # Write the start time to the log file. output.close() # Closes the log file. print "Process completed at " + str(Date) + " " + str(Time) + "."
write_log(recMsg, log) # =================== ======== ======================================================================================= ========== ##COMPRESS## # =================== ======== ======================================================================================= ========== try: #identify the log file log = 'C:\Scripts_2016\Dans_Scripts\Log\CompressLog.txt' write_log( "-----------------------------------------------------------", log) write_log("COMPRESS", log) write_log( "-----------------------------------------------------------", log) write_log("Starting to run compress...", log) # Run the compress tool on the sde connection. arcpy.Compress_management(SDEconnection) # if the compress is successful add a message. write_log("\t Compress success", log) except: # If the compress failed, add a message. write_log("\t Compress failed") write_log(arcpy.GetMessages(), log) try: time.sleep(600) except: write_log("Unable to sleep", log) # =================== ======== ======================================================================================= ========== # #Analyse system tables## # =================== ======== ======================================================================================= ==========
# Get a list of versions to pass into the ReconcileVersions tool main_log.write("Rec and Post Edit version to Default\n") versionList = arcpy.ListVersions(config['arcgis']['sdeFile']) # Process: Reconcile and Post MasterEdit to Default arcpy.ReconcileVersions_management(config['arcgis']['sdeFile'], "ALL_VERSIONS", defaultVersion, editVersion, "LOCK_ACQUIRED", "ABORT_CONFLICTS", "BY_OBJECT", "FAVOR_EDIT_VERSION", "POST", "KEEP_VERSION") main_log.write(arcpy.GetMessages() + "\n") # Process: Compress main_log.write("Compress DB\n") arcpy.Compress_management(config['arcgis']['sdeFile']) main_log.write(arcpy.GetMessages() + "\n") # Process: Reconcile All Versions main_log.write("Reconcile all the versions\n") arcpy.ReconcileVersions_management(config['arcgis']['sdeFile'], "ALL_VERSIONS", defaultVersion, versionList, "LOCK_ACQUIRED", "ABORT_CONFLICTS", "BY_OBJECT", "FAVOR_TARGET_VERSION", "NO_POST", "KEEP_VERSION") main_log.write(arcpy.GetMessages() + "\n") main_log.write("ArcPY POST And Rec Complete\n") # Analyze and Vacuum postgres main_log.write("Connecting to the db directly through pg\n") pgConnection = "host={} dbname={} user={} password={}".format(
def ship(self):
    """Compress the SGID10 geodatabase as the SDE user, then analyze the
    datasets owned by each schema-owner connection listed below; any
    failure is emailed with the traceback and re-raised.
    """
    try:
        #: Run commands as user SDE to compress and analyze database and system tables
        sdeconnection = join(self.garage, 'SGID10', 'SGID_sde@[email protected]')
        arcpy.Compress_management(sdeconnection)
        self.log.info('Compress Complete')
        #: System table analyze was giving problems so it had to go for now.
        # arcpy.AnalyzeDatasets_management(sdeconnection, 'SYSTEM')
        # print 'Analyze System Tables Complete'
        userconnections = [
            join(self.garage, 'SGID10', 'SGID_Biosciense@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_Boundaries@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_Cadastre@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_Climate@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_Demographics@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_Economy@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_Elevation@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_Energy@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_Environment@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_Farming@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_Geoscience@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_Health@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_History@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_Indices@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_Location@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_Planning@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_Political@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_Raster@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_Recreation@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_Society@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_Transportation@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_Utilities@[email protected]'),
            join(self.garage, 'SGID10', 'SGID_Water@[email protected]')
        ]
        for con in userconnections:
            # set workspace
            # the user in this workspace must be the owner of the data to analyze.
            workspace = con
            # set the workspace environment
            arcpy.env.workspace = workspace
            # NOTE: Analyze Datasets can accept a Python list of datasets.
            # Get a list of all the datasets the user has access to.
            # First, get all the stand alone tables, feature classes and rasters.
            dataList = arcpy.ListTables() + arcpy.ListFeatureClasses(
            ) + arcpy.ListRasters()
            # Next, for feature datasets get all of the datasets and featureclasses
            # from the list and add them to the master list.
            for dataset in arcpy.ListDatasets('', 'Feature'):
                arcpy.env.workspace = join(workspace, dataset)
                dataList += arcpy.ListFeatureClasses(
                ) + arcpy.ListDatasets('', 'Feature')
            # reset the workspace
            arcpy.env.workspace = workspace
            # Get the user name for the workspace
            userName = arcpy.Describe(
                workspace).connectionProperties.user.lower()
            # remove any datasets that are not owned by the connected user.
            userDataList = [
                ds for ds in dataList
                if ds.lower().find('.%s.' % userName) > -1
            ]
            # Execute analyze datasets
            # Note: to use the 'SYSTEM' option the workspace user must be an administrator.
            if len(dataList) > 0:
                arcpy.AnalyzeDatasets_management(workspace, 'NO_SYSTEM',
                                                 userDataList,
                                                 'ANALYZE_BASE',
                                                 'ANALYZE_DELTA',
                                                 'ANALYZE_ARCHIVE')
                self.log.info('Analyze Complete')
    except Exception:
        self.send_email('*****@*****.**',
                        'Error with {}'.format(__file__), format_exc())
        raise
#loop version keys and delete versions if the exist if 'versions' in k: ver = k['versions'] if k['out_folder_path'] is not None: deletever(ver, k['out_folder_path'] + k['out_name']) else: deletever(ver, k['out_name']) #compress for k in connections: print 'Start Compress versions.' print k['out_name'] #loop version keys and compress sde this compress state tree if k['out_folder_path'] is not None: try: arcpy.Compress_management(k['out_folder_path'] + k['out_name']) arcpy.Compress_management(k['out_folder_path'] + k['out_name']) arcpy.Compress_management(k['out_folder_path'] + k['out_name']) except: logger.error("Compress version " + k['out_folder_path'] + k['out_name'] + " Failed.") else: try: arcpy.Compress_management(k['out_name']) except: logger.error("Compress version " + k['out_name'] + " Failed.") #Create for k in connections: print 'Start Create versions.' #loop version keys and re-create versions