def main(arcpy):
    """Run the weekly one-way replica synchronizations from the production
    SDE out to the data-warehouse and E911 geodatabases.

    :param arcpy: the arcpy site package, passed in by the caller so it
        controls licensing/initialization.

    Relies on a module-level ``connect(db, server)`` helper and a configured
    ``logging`` module. Errors are logged, never raised.
    """
    # print() form is valid in both Python 2 and 3 (was a py2-only statement).
    print("Running Main")

    # Local variables: workspace connections (all hosted on sssgisdb1).
    stlco_sde_prod = connect("stlco_sde_prod", "sssgisdb1")
    stlco_sde_dw = connect("stlco_sde_dw", "sssgisdb1")
    e911_sde_prod_10 = connect("e911_sde_prod", "sssgisdb1")

    # Replica names (registered against the parent geodatabase).
    prod_to_e911_10 = "SDEDBO.prod_to_e911"
    Prod_DataWarehouse_1 = "SDEDBO.Prod_DataWarehouse_1"
    Prod_DataWarehouse_2 = "SDEDBO.Prod_DataWarehouse_2"
    Prod_DataWarehouse_3 = "SDEDBO.Prod_DataWarehouse_3"

    sync_dir = "FROM_GEODATABASE1_TO_2"
    conflict_policy = ""     # Not applicable for a one-way replica
    conflict_detection = ""  # Not applicable for a one-way replica
    reconcile = ""           # Not applicable for a one-way replica

    try:
        print("Starting Sync")

        logging.debug(datetime.datetime.now().isoformat() + ": " + "Start Prod_DataWarehouse_1")
        # Process: Synchronize Jurisdictions, Trails, Zoning Petitions, and Zoning from prod to dw
        arcpy.SynchronizeChanges_management(stlco_sde_prod, Prod_DataWarehouse_1, stlco_sde_dw,
                                            sync_dir, conflict_policy, conflict_detection, reconcile)

        logging.debug(datetime.datetime.now().isoformat() + ": " + "Start Prod_DataWarehouse_2")
        # Process: Synchronize AddrPts and Street_Centerlines from prod to dw
        arcpy.SynchronizeChanges_management(stlco_sde_prod, Prod_DataWarehouse_2, stlco_sde_dw,
                                            sync_dir, conflict_policy, conflict_detection, reconcile)

        logging.debug(datetime.datetime.now().isoformat() + ": " + "Start prod_to_e911_10")
        # Process: Synchronize COGIS Dataset, ESN, and PSAP from prod to e911 v10
        arcpy.SynchronizeChanges_management(stlco_sde_prod, prod_to_e911_10, e911_sde_prod_10,
                                            sync_dir, conflict_policy, conflict_detection, reconcile)

        logging.debug(datetime.datetime.now().isoformat() + ": " + "Start Prod_DataWarehouse_3")
        # Process: Synchronize COGIS, ESN, and PSAP from prod to dw
        arcpy.SynchronizeChanges_management(stlco_sde_prod, Prod_DataWarehouse_3, stlco_sde_dw,
                                            sync_dir, conflict_policy, conflict_detection, reconcile)

        logging.debug(datetime.datetime.now().isoformat() + ": " + "End Synchronization")
    except Exception as e:
        print(e)
        logging.debug(datetime.datetime.now().isoformat() + ": " + "ERROR in Synchronization")
        logging.error(e)
def _syncWithProd(self):
    """Synchronize replica changes from the staging workspace into the
    production workspace (one-way, GDB1 wins), then compress the
    production SDE.

    All errors are reported via arcpy.AddError and logging; nothing is
    re-raised to the caller.
    """
    func = '_syncWithProd'
    logging.info("Begin " + func)
    try:
        logging.debug("Synchronizing data from production to staging")
        arcpy.SynchronizeChanges_management(
            self._stagingWorkspace(), self._replica(),
            self._productionWorkspace(), "FROM_GEODATABASE1_TO_2",
            "IN_FAVOR_OF_GDB1", "BY_OBJECT", "DO_NOT_RECONCILE")
        logging.debug(
            "Finished synchronizing data from production to staging")
        logging.debug("Compressing data in Production SDE")
        arcpy.Compress_management(self._productionWorkspace())
        logging.debug("Finished compressing data in Production SDE")
    except arcpy.ExecuteError:
        msgs = arcpy.GetMessages(2)
        arcpy.AddError(msgs)
        logging.error("ArcGIS error: %s", msgs)
    # Narrowed from a bare "except:" so SystemExit/KeyboardInterrupt
    # still propagate; traceback capture unchanged.
    except Exception:
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
        msg = "Error in " + func + ":\n" + tbinfo + "\nError Info:\n" + str(
            sys.exc_info()[1])
        arcpy.AddError(msg)
        logging.error(msg)
    # Fixed copy-paste log message: previously said "End sync with staging".
    logging.info("End " + func)
    return
def SyncReplicaDatasets(CeoReplicas, McReplicas):
    '''
    This function takes replicas from the CEO and MAC databases and syncs
    changes that have been made to the relevant child datasets in the
    IAMUW_REPLICATION database on the pub server.

    :param CeoReplicas: iterable of replica names registered against the CEO SDE
    :param McReplicas: iterable of replica names registered against the MAC SDE
    '''
    replication_sde = r"Database Connections\IAMUW_REPLICATION.sde"

    def _sync_all(parent_sde, replicas):
        # Same call for both databases: sync FROM_GEODATABASE2_TO_1 with
        # the replication database in favor on conflicts (IN_FAVOR_OF_GDB2).
        for replica in replicas:
            arcpy.SynchronizeChanges_management(
                parent_sde, replica, replication_sde,
                "FROM_GEODATABASE2_TO_1", "IN_FAVOR_OF_GDB2", "BY_OBJECT")
            print(replica + ' has been synced')

    _sync_all(r"Database Connections\IAMUW-FS_CEO.sde", CeoReplicas)
    _sync_all(r"Database Connections\IAMUW-FS_MAC.sde", McReplicas)
def sync_with_staging():
    """Synchronize replica changes from the production workspace into the
    staging GIS edit workspace (one-way, production/GDB1 wins).

    Uses module-level ``production_workspace``, ``prod_schema`` and
    ``staging_gis_edit_workspace``. Errors are logged and reported via
    arcpy.AddError, never raised.
    """
    logging.info("Begin sync with staging")
    try:
        logging.debug("Synchronizing data from production to staging")
        arcpy.SynchronizeChanges_management(
            production_workspace, prod_schema, staging_gis_edit_workspace,
            "FROM_GEODATABASE1_TO_2", "IN_FAVOR_OF_GDB1", "BY_OBJECT",
            "DO_NOT_RECONCILE")
        logging.debug("Finished synchronizing data from production to staging")
    except arcpy.ExecuteError:
        msgs = arcpy.GetMessages(2)
        arcpy.AddError(msgs)
        logging.error("ArcGIS error: %s", msgs)
    except Exception as e:
        # str(e) instead of e.message: BaseException.message was deprecated
        # in Python 2.6 and removed in Python 3.
        msg = str(e)
        arcpy.AddError(msg)
        logging.error("Python error: %s", msg)
    logging.info("End sync with staging")
    return
def mainFunction(sourceGeodatabase, replicatedGeodatabase, replicaName, featureClasses, tables):
    # Get parameters from ArcGIS Desktop tool by separating by comma
    # e.g. (var1 is 1st parameter, var2 is 2nd parameter, var3 is 3rd parameter)
    #
    # Synchronizes one or more one-way replicas from sourceGeodatabase into
    # replicatedGeodatabase, then copies any extra tables/feature classes
    # across. Relies on module-level printMessage, logger, logMessage,
    # enableLogging, sendErrorEmail, sendEmail, arcgisDesktop and output.
    # NOTE(review): written for Python 2 (string.split, unicode) — the
    # sys.version_info branches below suggest partial Python 3 support only.
    try:
        # --------------------------------------- Start of code --------------------------------------- #
        # Split the replicas string in case there are more than one
        replica = string.split(replicaName, ",")
        # For each replica
        for replica in replica:
            printMessage("Syncing changes for " + replica + " replica...", "info")
            # Logging
            if (enableLogging == "true"):
                logger.info("Syncing changes for " + replica + " replica...")
            # Sync changes between databases for the replica
            arcpy.SynchronizeChanges_management(sourceGeodatabase, replica, replicatedGeodatabase, "FROM_GEODATABASE1_TO_2", "IN_FAVOR_OF_GDB1", "BY_OBJECT", "DO_NOT_RECONCILE")

        # Copy over views and custom tables - Copy used, will fail if locks present on FGDB.
        if (len(tables) > 0):
            # Remove out apostrophes
            tableList = string.split(str(tables).replace("'", ""), ";")
            # Copy over tables
            for table in tableList:
                printMessage("Copying Over Table " + table + "...", "info")
                # Logging
                if (enableLogging == "true"):
                    logger.info("Copying Over Table " + table + "...")
                tableName = arcpy.Describe(table)
                # Change dataset name to be just name (remove user and schema if SDE database)
                splitDataset = tableName.name.split('.')
                dataset = splitDataset[-1]
                arcpy.CopyRows_management(table, os.path.join(replicatedGeodatabase, dataset), "")

        if (len(featureClasses) > 0):
            # Remove out apostrophes
            featureClassList = string.split(str(featureClasses).replace("'", ""), ";")
            # Copy over feature classes
            for featureClass in featureClassList:
                printMessage("Copying Over Feature Class " + featureClass + "...", "info")
                # Logging
                if (enableLogging == "true"):
                    logger.info("Copying Over Feature Class " + featureClass + "...")
                featureClassName = arcpy.Describe(featureClass)
                # Change dataset name to be just name (remove user and schema if SDE database)
                splitDataset = featureClassName.name.split('.')
                dataset = splitDataset[-1]
                arcpy.CopyFeatures_management(featureClass, os.path.join(replicatedGeodatabase, dataset), "", "0", "0", "0")
        # --------------------------------------- End of code --------------------------------------- #

        # If called from gp tool return the arcpy parameter
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                # If ArcGIS desktop installed
                if (arcgisDesktop == "true"):
                    arcpy.SetParameter(1, output)
                # ArcGIS desktop not installed
                else:
                    return output
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
    # If arcpy error
    except arcpy.ExecuteError:
        # Build and show the error message
        errorMessage = arcpy.GetMessages(2)
        printMessage(errorMessage, "error")
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
    # If python error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message
        # If many arguments
        if (e.args):
            for i in range(len(e.args)):
                if (i == 0):
                    # Python version check
                    if sys.version_info[0] >= 3:
                        # Python 3.x
                        errorMessage = str(e.args[i]).encode('utf-8').decode('utf-8')
                    else:
                        # Python 2.x
                        errorMessage = unicode(e.args[i]).encode('utf-8')
                else:
                    # Python version check
                    if sys.version_info[0] >= 3:
                        # Python 3.x
                        errorMessage = errorMessage + " " + str(e.args[i]).encode('utf-8').decode('utf-8')
                    else:
                        # Python 2.x
                        errorMessage = errorMessage + " " + unicode(e.args[i]).encode('utf-8')
        # Else just one argument
        else:
            errorMessage = e
        printMessage(errorMessage, "error")
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
def sync_back(sde_gdb, file_gdb, checkout_name):
    """Push edits from a checked-out file geodatabase back into the parent
    SDE geodatabase (file GDB is geodatabase 1 in the call, SDE is 2).

    :param sde_gdb: parent enterprise geodatabase connection
    :param file_gdb: child file geodatabase holding the checkout
    :param checkout_name: name of the check-out replica to synchronize
    """
    # NOTE(review): the last argument is the `reconcile` parameter, whose
    # documented keywords are "RECONCILE"/"DO_NOT_RECONCILE" — confirm that
    # "TRUE" is interpreted as intended here.
    arcpy.SynchronizeChanges_management(file_gdb, checkout_name, sde_gdb, "FROM_GEODATABASE1_TO_2", "IN_FAVOR_OF_GDB1", "", "TRUE")
%s """ % (FROM, ", ".join(TO), SUBJECT, MSG) # Send the mail if filteredEmail try: server = smtplib.SMTP(SERVER) server.sendmail(FROM, TO, MESSAGE) server.quit() except: pass try: #Weekly synchronization of County parcels arcpy.SynchronizeChanges_management( "GIS Servers/gis on #### (user)/GeoData/####y_GeoData.GeoDataServer", "DBO.####sParcels", "Database Connections/####.sde", "FROM_GEODATABASE1_TO_2", "IN_FAVOR_OF_GDB1", "BY_OBJECT", "DO_NOT_RECONCILE") print 'Synchronized parcels from County' #block new connections to the database. arcpy.AcceptConnections('Database Connections/###.sde', False) # wait 10 minutes time.sleep(300) #disconnect all users from the database. arcpy.DisconnectUser('Database Connections/###.sde', "ALL") #reconcile users to QC arcpy.ReconcileVersions_management(
## Replication settings
replica_gdb1 = "Database Connections/RPUD_TRANSDB.sde"  # parent geodatabase
replica_gdb2 = "//corfile/common/Public Utilities/CCTV/CCTV.gdb"  # child file GDB
replica_name = "RPUD.CCTV_gistprd_to_cctv_fgdb"  # replica pre-defined and registered on RPUD
sync_direction = "FROM_GEODATABASE1_TO_2"

def _status(message):
    # Echo progress to both the console and the geoprocessing messages.
    print(message)
    arcpy.AddMessage(message)

## Bring the child replica schema up to date with the parent before syncing
_status("Comparing replica schema...")
outputXML = "C:/ReplicaSchema/replicaSchema.xml"
schemaChangeXML = "C:/ReplicaSchema/schemaChange.xml"
arcpy.ExportReplicaSchema_management(replica_gdb1, outputXML, replica_name)
arcpy.CompareReplicaSchema_management(replica_gdb2, outputXML, schemaChangeXML)
arcpy.ImportReplicaSchema_management(replica_gdb2, schemaChangeXML)

## Push CCTV data changes from the parent to the child
_status("Updating CCTV data from {0} to {1}...".format(replica_gdb1, replica_gdb2))
arcpy.SynchronizeChanges_management(replica_gdb1, replica_name, replica_gdb2,
                                    sync_direction)
_status("Sync complete.")
def append_to_esri_source(self, input_fc, esri_output_fc, input_where_clause):
    """
    Append to the esri source FC, including projecting to output coord system,
    creating a versioned FC of the output in SDE, deleting from that versioned
    FC based on a where_clause, appending the new data, and then posting that
    version
    :param input_fc: the input source data
    :param esri_output_fc: the output in SDE
    :param input_where_clause: where clause used to add to/delete from the output esri FC
    :return: None; calls sys.exit(1) on workspace/count-validation failure
    """
    logging.info(
        'Starting vector_layer.append_to_esri_source for {0}'.format(
            self.name))

    # Reproject the input into the output's spatial reference first.
    fc_to_append = self.project_to_output_srs(input_fc, esri_output_fc)

    logging.info('appending {} to {}'.format(input_fc, esri_output_fc))

    logging.debug('Creating a versioned FL from esri_service_output')
    arcpy.MakeFeatureLayer_management(esri_output_fc, "esri_service_output_fl")

    # Unique, timestamp-suffixed version name for this edit session.
    version_name = self.name + "_" + str(int(time.time()))

    # Walk up from the FC to the .sde workspace (one extra dirname hop if
    # the FC lives inside a feature dataset).
    sde_workspace = os.path.dirname(esri_output_fc)
    desc = arcpy.Describe(sde_workspace)
    if hasattr(desc, "datasetType") and desc.datasetType == 'FeatureDataset':
        sde_workspace = os.path.dirname(sde_workspace)
    del desc

    if os.path.splitext(sde_workspace)[1] != '.sde':
        logging.error('Could not find proper SDE workspace. Exiting.')
        sys.exit(1)

    # Edit in a private version so the delete+append can be posted atomically.
    arcpy.CreateVersion_management(sde_workspace, "sde.DEFAULT", version_name,
                                   "PRIVATE")
    arcpy.ChangeVersion_management("esri_service_output_fl", 'TRANSACTIONAL',
                                   'gfw.' + version_name, '')

    if input_where_clause:
        logging.debug(
            'Deleting features from esri_service_output feature layer based on input_where_clause. '
            'SQL statement: {0}'.format(input_where_clause))
        arcpy.SelectLayerByAttribute_management("esri_service_output_fl",
                                                "NEW_SELECTION",
                                                input_where_clause)
        # Delete the features selected by the input where_clause
        arcpy.DeleteRows_management("esri_service_output_fl")
    else:
        logging.debug(
            'No where clause for esri_service_output found; deleting all features before '
            'appending from source')

        # Delete everything in OID-ranged batches via direct SQL min/max.
        sde_sql_conn = arcpy.ArcSDESQLExecute(sde_workspace)
        esri_fc_name = os.path.basename(esri_output_fc)
        # NOTE(review): Python 2 print statement — this module is py2-only.
        print esri_fc_name

        # why this, exactly?
        # there's also a lbr_plantations_old feature class (for some reason)
        # and lbr_plantation_evw points to that.
        # I don't know why and I don't have time to fix it
        # WDPA test - leave off
        if esri_fc_name != 'gfw_countries.gfw.lbr_plantations':
            # Versioned feature classes are queried through their _evw view.
            esri_fc_name += '_evw'

        # Find the min and max OID values
        to_delete_oid_field = [
            f.name for f in arcpy.ListFields(esri_output_fc)
            if f.type == 'OID'
        ][0]
        sql = 'SELECT min({0}), max({0}) from {1}'.format(
            to_delete_oid_field, esri_fc_name)
        to_delete_min_oid, to_delete_max_oid = sde_sql_conn.execute(sql)[0]

        # If there are features to delete, do it
        if to_delete_min_oid and to_delete_max_oid:
            for wc in util.generate_where_clause(to_delete_min_oid,
                                                 to_delete_max_oid,
                                                 to_delete_oid_field, 1000):
                logging.debug('Deleting features with {0}'.format(wc))
                arcpy.MakeFeatureLayer_management("esri_service_output_fl",
                                                  "fl_to_delete", wc)
                arcpy.DeleteRows_management("fl_to_delete")
                arcpy.Delete_management("fl_to_delete")
        else:
            # Table already empty; nothing to delete.
            pass

    # Counts taken before the append so the result can be validated below.
    esri_output_pre_append_count = int(
        arcpy.GetCount_management("esri_service_output_fl").getOutput(0))
    input_feature_count = int(
        arcpy.GetCount_management(fc_to_append).getOutput(0))

    logging.debug('Starting to append to esri_service_output')

    # don't need to batch append if it's coming from an SDE data source
    # these are used exclusively by country-vector layers
    # and the data is generally small, compared to things like WDPA
    if 'sde' in fc_to_append:
        logging.debug(
            "Appending all features from {}- no wc because it's an SDE input"
            .format(fc_to_append))
        arcpy.MakeFeatureLayer_management(fc_to_append, "fl_to_append")
        arcpy.Append_management("fl_to_append", "esri_service_output_fl",
                                "NO_TEST")
        arcpy.Delete_management("fl_to_append")
    else:
        # Find the min and max OID values
        to_append_oid_field = [
            f.name for f in arcpy.ListFields(fc_to_append) if f.type == 'OID'
        ][0]
        to_append_min_oid, to_append_max_oid = cartodb.ogrinfo_min_max(
            fc_to_append, to_append_oid_field)

        # Append in OID-ranged batches of 1000.
        for wc in util.generate_where_clause(to_append_min_oid,
                                             to_append_max_oid,
                                             to_append_oid_field, 1000):
            logging.debug('Appending features with {0}'.format(wc))
            arcpy.MakeFeatureLayer_management(fc_to_append, "fl_to_append",
                                              wc)
            arcpy.Append_management("fl_to_append", "esri_service_output_fl",
                                    "NO_TEST")
            arcpy.Delete_management("fl_to_append")

    logging.debug('Append finished, starting to reconcile versions')
    arcpy.ReconcileVersions_management(
        input_database=sde_workspace,
        reconcile_mode="ALL_VERSIONS",
        target_version="sde.DEFAULT",
        edit_versions='gfw.' + version_name,
        acquire_locks="LOCK_ACQUIRED",
        abort_if_conflicts="NO_ABORT",
        conflict_definition="BY_OBJECT",
        conflict_resolution="FAVOR_TARGET_VERSION",
        with_post="POST",
        with_delete="KEEP_VERSION",
        out_log="")

    logging.debug('Deleting temporary FL and temporary version')
    # For some reason need to run DeleteVersion_management here, will have errors if with_delete is used above
    arcpy.Delete_management("esri_service_output_fl")
    arcpy.DeleteVersion_management(sde_workspace, 'gfw.' + version_name)

    post_append_count = int(
        arcpy.GetCount_management(esri_output_fc).getOutput(0))

    # Sanity check: old count + appended count must equal the new count.
    if esri_output_pre_append_count + input_feature_count == post_append_count:
        logging.debug('Append successful based on sum of input features')
    else:
        # NOTE(review): '\input_feature_count' is likely a typo for
        # '\ninput_feature_count' (missing newline escape) — confirm and fix.
        logging.debug(
            'esri_output_pre_append_count: {0}\input_feature_count: {1}\npost_append_count{2}\n'
            'Append failed, sum of input features does not match. '
            'Exiting'.format(esri_output_pre_append_count,
                             input_feature_count, post_append_count))
        sys.exit(1)

    # now that we've finished syncing everything locally, need to push it to the PROD replica
    local_sde = r'D:\scripts\connections\gfw (gfw@localhost).sde'
    prod_gdb = r'P:\data\gfw_database\gfw.gdb'

    arcpy.SynchronizeChanges_management(local_sde, "gfw.GFW", prod_gdb,
                                        "FROM_GEODATABASE1_TO_2")

    return
def main(argv):
    """Synchronize a CSV-driven list of replicas between a parent and a
    child geodatabase connection.

    Command line: -p <parentconnection> -c <childconnection>
                  -i <inputlist CSV> -l <logfile>

    The CSV must provide ParentReplica, ChildReplica, Direction,
    ConflictRes and ConflictDetect columns. Each replica is verified to
    exist on both sides before SynchronizeChanges is attempted; failures
    are logged and processing continues with the next row.
    """
    print('start')
    try:
        opts, args = getopt.getopt(argv, "p:c:i:l:",
                                   ["parentconn=", "childconn=", "inputlist=", "logfile="])
    except getopt.GetoptError:
        print('test.py -p <parentconnection> -a <parenttype> -c <childconnection> -h <childtype> -i <inputlist> -l <logfile>')
        sys.exit(2)
    print('parse options')
    for o, a in opts:
        if o in ("-p", "--parentconn"):
            ParentConn = a
        elif o in ("-c", "--childconn"):
            ChildConn = a
        elif o in ("-i", "--inputlist"):
            InputList = a
        elif o in ("-l", "--logfile"):
            LOG_FILENAME = a
        else:
            assert False, "unhandled option"

    # Set up logging
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)-8s %(message)s',
                        datefmt='%a, %d %b %Y %H:%M:%S',
                        filename=LOG_FILENAME,
                        filemode='a')

    # Read the work list; narrowed from a bare "except:" so interrupts
    # still propagate.
    try:
        todo = csv.DictReader(open(InputList, 'r'), delimiter=',')
    except Exception:
        logging.info('Failed to read input list!')
        sys.exit(2)

    for row in todo:
        print(row)
        ParentReplica = row.get('ParentReplica')
        ChildReplica = row.get('ChildReplica')
        Direction = row.get('Direction')
        ConflictRes = row.get('ConflictRes')
        ConflictDetect = row.get('ConflictDetect')

        # Verify parent replica
        blnFoundParent = False
        for parentrep in arcpy.da.ListReplicas(ParentConn):
            if parentrep.name == ParentReplica:
                blnFoundParent = True
                break
        if not blnFoundParent:
            logging.info(ParentReplica + " not found on parent")

        # Verify child replica
        blnFoundChild = False
        for childrep in arcpy.da.ListReplicas(ChildConn):
            if childrep.name == ChildReplica:
                blnFoundChild = True
                break
        if not blnFoundChild:
            logging.info(ChildReplica + " not found on child")

        # Logical "and" instead of bitwise "&" (same result on bools,
        # but short-circuiting and idiomatic).
        if blnFoundChild and blnFoundParent:
            # Process: Synchronize Changes
            try:
                arcpy.SynchronizeChanges_management(ParentConn, ParentReplica,
                                                    ChildConn, Direction,
                                                    ConflictRes, ConflictDetect,
                                                    "DO_NOT_RECONCILE")
                logging.info('Sync ' + ParentReplica)
            except Exception:
                logging.info('Sync ' + ParentReplica + " - " + arcpy.GetMessages(2))

    logging.info('Complete Sync *********************************')
logMsg = '' # get time stamp for start of tool starttime = time.clock() # SDE is parent geodatabase in replication # Change this to your SDE connection sde = r"SDE Connection" # Child file geodatabase in replication # Change this to your file geodatabase child_gdb = r"\\path\to\file.gdb" # Process: Synchronize Changes # Replicates data from parent to child geodatabase # update the name of the replication result = arcpy.SynchronizeChanges_management(sde, "Name of Replication", child_gdb, "FROM_GEODATABASE1_TO_2", "IN_FAVOR_OF_GDB1", "BY_OBJECT", "DO_NOT_RECONCILE") # Get the end time of the geoprocessing tool(s) finishtime = time.clock() # Get the total time to run the geoprocessing tool(s) elapsedtime = finishtime - starttime # write result messages to log # delay writing results until geoprocessing tool gets the completed code while result.status < 4: time.sleep(0.2) # store tool result message in a variable resultValue = result.getMessages() # add the tool's message to the log message logMsg += "completed {}\n".format(str(resultValue)) # add a more human readable message to log message
def sync_floorplans(sde_offline_floorplans, file_GDB_floorplans):
    """Each time before a check-out replica is created, the floorplans will
    be updated from floorplansoffline SDE."""
    # Replica registered in the offline SDE; pushed one-way into the file GDB.
    replica = "DBO.floorplans_local"
    arcpy.AddMessage("Syncing")
    # Blank trailing arguments: conflict policy/detection and reconcile do
    # not apply to this one-way sync.
    arcpy.SynchronizeChanges_management(
        sde_offline_floorplans,
        replica,
        file_GDB_floorplans,
        "FROM_GEODATABASE1_TO_2",
        "",
        "",
        "",
    )
    arcpy.AddMessage("Done Sync")