def getOIDs(targelUrl, expr):
    """Return the list of object IDs for features in the target service layer.

    targelUrl -- REST url of the target feature layer
    expr -- where clause used to filter the query; '' queries all rows ('1=1')

    Returns a (possibly empty) list of object IDs. On a service error the
    failure is logged and the empty list is returned.
    """
    ids = []
    arcpy.SetProgressor("default", "Querying Existing Features")
    arcpy.SetProgressorLabel("Querying Existing Features")
    url = targelUrl + '/query'
    #dla.addMessage("Url:"+url)
    token = dla.getSigninToken()
    # fall back to a match-everything clause when no expression is supplied
    where = expr if expr != '' else '1=1'
    params = {'f': 'pjson', 'where': where, 'token': token, 'returnIdsOnly': 'true'}
    #dla.addMessage("Params:"+json.dumps(params))
    result = dla.sendRequest(url, params)
    # Explicit test for the 'error' key instead of the original
    # try/except-KeyError control flow, which also hid unrelated errors.
    if 'error' in result:
        if result['error'] is not None:
            dla.addMessage("Query features from Feature Service failed")
            dla.addMessage(json.dumps(result))
    else:
        ids = result['objectIds']
        msg = str(len(ids)) + " features found in existing Service"
        print(msg)
        dla.addMessage(msg)
    return ids
def deleteFeatures(sourceLayer, targelUrl, expr):
    """Delete features from the target service in chunks of _chunkSize.

    sourceLayer -- unused here; kept for signature parity with addFeatures
    targelUrl -- REST url of the target feature layer
    expr -- where clause selecting the features to delete ('' selects all)

    Returns True on success (including nothing to delete), False on failure.
    """
    retval = False
    error = False
    # delete section
    ids = getOIDs(targelUrl, expr)
    try:
        featuresProcessed = 0
        numFeat = len(ids)
        if numFeat == 0:
            dla.addMessage("0 Features to Delete, exiting")
            return True  # nothing to delete is OK
        chunk = min(numFeat, _chunkSize)
        url = targelUrl + '/deleteFeatures'  # loop-invariant, hoisted out of the loop
        arcpy.SetProgressor("default", "Deleting Features")
        while featuresProcessed < numFeat and error == False:
            # upper bound of this chunk ('next' renamed: it shadowed the builtin)
            upper = featuresProcessed + chunk
            msg = "Deleting features " + str(featuresProcessed) + ":" + str(upper)
            dla.addMessage(msg)
            arcpy.SetProgressorLabel(msg)
            oids = ",".join(str(e) for e in ids[featuresProcessed:upper])
            token = dla.getSigninToken()
            params = {'f': 'pjson', 'objectIds': oids, 'token': token}
            result = dla.sendRequest(url, params)
            # explicit error-key check instead of try/except-KeyError control flow
            if 'error' in result and result['error'] is not None:
                retval = False
                dla.addMessage("Delete features from Feature Service failed")
                dla.addMessage(json.dumps(result))
                error = True
            else:
                try:
                    lenDeleted = len(result['deleteResults'])
                    msg = str(lenDeleted) + " features deleted, " + str(featuresProcessed + chunk) + "/" + str(numFeat)
                    print(msg)
                    dla.addMessage(msg)
                    retval = True
                except Exception:
                    # unexpected response shape: report and stop
                    retval = False
                    error = True
                    dla.showTraceback()
                    dla.addMessage("Delete features from Feature Service failed")
                    dla.addError(json.dumps(result))
            featuresProcessed += chunk
    except Exception:
        retval = False
        error = True
        dla.showTraceback()
        dla.addMessage("Delete features from Feature Service failed")
    return retval
def addFeatures(sourceLayer, targelUrl, expr):
    """Add features from sourceLayer to the target service in _chunkSize batches.

    sourceLayer -- feature class/layer converted to JSON via featureclass_to_json
    targelUrl -- REST url of the target feature layer
    expr -- unused here; kept for signature parity with deleteFeatures

    Returns True on success (including nothing to add), False on failure.
    """
    retval = False
    error = False
    # add section
    try:
        arcpy.SetProgressor("default", "Adding Features")
        arcpy.SetProgressorLabel("Adding Features")
        featurejs = featureclass_to_json(sourceLayer)
        url = targelUrl + '/addFeatures'
        numFeat = len(featurejs['features'])
        if numFeat == 0:
            dla.addMessage("0 Features to Add, exiting")
            return True  # nothing to add is OK
        chunk = min(numFeat, _chunkSize)
        featuresProcessed = 0
        while featuresProcessed < numFeat and error == False:
            # upper bound of this chunk ('next' renamed: it shadowed the builtin)
            upper = featuresProcessed + chunk
            features = featurejs['features'][featuresProcessed:upper]
            msg = "Adding features " + str(featuresProcessed) + ":" + str(upper)
            dla.addMessage(msg)
            arcpy.SetProgressorLabel(msg)
            token = dla.getSigninToken()
            params = {'rollbackonfailure': 'true', 'f': 'json', 'token': token, 'features': json.dumps(features)}
            result = dla.sendRequest(url, params)
            # explicit error-key check instead of try/except-KeyError control flow
            if 'error' in result and result['error'] is not None:
                retval = False
                dla.addMessage("Add features to Feature Service failed")
                dla.addMessage(json.dumps(result))
                error = True
            else:
                try:
                    lenAdded = len(result['addResults'])
                    msg = str(lenAdded) + " features added, " + str(featuresProcessed + chunk) + "/" + str(numFeat)
                    print(msg)
                    dla.addMessage(msg)
                    retval = True
                except Exception:
                    # unexpected response shape: report and stop
                    retval = False
                    dla.addMessage("Add features to Feature Service failed")
                    dla.showTraceback()
                    dla.addError(json.dumps(result))
                    error = True
            featuresProcessed += chunk
    except Exception:
        retval = False
        dla.addMessage("Add features to Feature Service failed")
        dla.showTraceback()
        error = True
    return retval
def doPublishPro(sourceLayer, targelUrl, expr):
    """Publish sourceLayer to a service registered on Portal or ArcGIS Online.

    When useReplaceSettings is enabled and an expression is supplied, the
    matching features are deleted first, then the source features are added.
    Returns True on success, False otherwise.
    """
    retval = True
    token = dla.getSigninToken()
    if token is None:  # 'is None' rather than '== None' (PEP 8)
        dla.addError("Unable to retrieve token, exiting")
        return False
    dla.setupProxy()
    # replace mode: remove the features matched by expr before re-adding
    if expr != '' and useReplaceSettings == True:
        retval = deleteFeatures(sourceLayer, targelUrl, expr)
    if retval == True:
        retval = addFeatures(sourceLayer, targelUrl, expr)
    return retval
def doPublishPro(sourceLayer, targelUrl, expr):
    # Publish to a service registered on Portal or ArcGIS Online:
    # optionally delete matching features (replace mode), then add.
    token = dla.getSigninToken()
    if token == None:
        dla.addError("Unable to retrieve token, exiting")
        return False
    dla.setupProxy()
    ok = True
    if expr != '' and useReplaceSettings == True:
        # replace mode: clear out the matching features before adding
        ok = deleteFeatures(sourceLayer, targelUrl, expr)
    if ok == True:
        ok = addFeatures(sourceLayer, targelUrl, expr)
    return ok
def getOIDs(targelUrl, expr):
    """Return the list of object IDs for features in the target service layer.

    targelUrl -- REST url of the target feature layer
    expr -- where clause used to filter the query; '' queries all rows ('1=1')

    Returns a (possibly empty) list of object IDs. On a service error the
    failure is logged and the empty list is returned.
    """
    ids = []
    arcpy.SetProgressor("default", "Querying Existing Features")
    arcpy.SetProgressorLabel("Querying Existing Features")
    url = targelUrl + '/query'
    #dla.addMessage("Url:"+url)
    token = dla.getSigninToken()
    # fall back to a match-everything clause when no expression is supplied
    where = expr if expr != '' else '1=1'
    params = {'f': 'pjson', 'where': where, 'token': token, 'returnIdsOnly': 'true'}
    #dla.addMessage("Params:"+json.dumps(params))
    result = dla.sendRequest(url, params)
    # Explicit test for the 'error' key instead of the original
    # try/except-KeyError control flow, which also hid unrelated errors.
    if 'error' in result:
        if result['error'] is not None:
            dla.addMessage("Query features from Feature Service failed")
            dla.addMessage(json.dumps(result))
    else:
        ids = result['objectIds']
        msg = str(len(ids)) + " features found in existing Service"
        print(msg)
        dla.addMessage(msg)
    return ids
def publish(xmlFileNames):
    """Perform the data update processing for each configuration file.

    Called from main or from another script. xmlFileNames is a
    semicolon-separated multi-value string of xml config file paths.
    Mutates the module globals sourceLayer/targetLayer per file and resets
    them to None afterwards so multiple files can be processed in sequence.
    """
    global sourceLayer, targetLayer, _success
    dla._errorCount = 0
    arcpy.SetProgressor("default", "Data Assistant")
    arcpy.SetProgressorLabel("Data Assistant")
    xmlFiles = xmlFileNames.split(";")
    for xmlFile in xmlFiles:  # multi value parameter, loop for each file
        dla.addMessage("Configuration file: " + xmlFile)
        xmlDoc = dla.getXmlDoc(xmlFile)  # parse the xml document
        if xmlDoc == None:
            return
        svceS = False
        svceT = False
        # resolve source/target from the xml only when not already supplied
        if sourceLayer == "" or sourceLayer == None:
            sourceLayer = dla.getNodeValue(xmlDoc, "Source")
            svceS = dla.checkLayerIsService(sourceLayer)
        if targetLayer == "" or targetLayer == None:
            targetLayer = dla.getNodeValue(xmlDoc, "Target")
            svceT = dla.checkLayerIsService(targetLayer)
        dla.addMessage(targetLayer)
        ## Added May2016. warn user if capabilities are not correct, exit if not a valid layer
        if not dla.checkServiceCapabilities(sourceLayer, True):
            return False
        if not dla.checkServiceCapabilities(targetLayer, True):
            return False
        if svceS == True or svceT == True:
            # when signed in get the token and use this. Will be requested
            # many times during the publish
            token = dla.getSigninToken()
            if token == None:
                dla.addError("User must be signed in for this tool to work with services")
                return
        expr = getWhereClause(xmlDoc)
        # replace-by-field-value mode requires a non-empty expression
        if useReplaceSettings == True and (expr == '' or expr == None):
            dla.addError("There must be an expression for replacing by field value, current value = " + str(expr))
            return False
        dla.setWorkspace()
        targetName = dla.getTargetName(xmlDoc)
        # extract the source data to an intermediate feature class in the workspace
        res = dlaExtractLayerToGDB.extract(xmlFile, None, dla.workspace, sourceLayer, targetName)
        if res != True:
            table = dla.getTempTable(targetName)
            msg = "Unable to export data, there is a lock on existing datasets or another unknown error"
            if arcpy.TestSchemaLock(table) != True:
                msg = "Unable to export data, there is a lock on the intermediate feature class: " + table
            dla.addError(msg)
            print(msg)
            return
        else:
            # apply the configured field calculations, then publish the result
            res = dlaFieldCalculator.calculate(xmlFile, dla.workspace, targetName, False)
            if res == True:
                dlaTable = dla.getTempTable(targetName)
                res = doPublish(xmlDoc, dlaTable, targetLayer)
        arcpy.ResetProgressor()
        sourceLayer = None  # set source and target back to None for multiple file processing
        targetLayer = None
        if res == False:
            err = "Data Assistant Update Failed, see messages for details"
            dla.addError(err)
            print(err)
def addFeatures(sourceLayer, targelUrl, expr):
    """Add features from sourceLayer to the target service in _chunkSize batches.

    sourceLayer -- feature class/layer converted to JSON via featureclass_to_json
    targelUrl -- REST url of the target feature layer
    expr -- unused here; kept for signature parity with deleteFeatures

    Returns True on success (including nothing to add), False on failure.
    """
    retval = False
    error = False
    # add section
    try:
        arcpy.SetProgressor("default", "Adding Features")
        arcpy.SetProgressorLabel("Adding Features")
        featurejs = featureclass_to_json(sourceLayer)
        url = targelUrl + '/addFeatures'
        numFeat = len(featurejs['features'])
        if numFeat == 0:
            dla.addMessage("0 Features to Add, exiting")
            return True  # nothing to add is OK
        chunk = min(numFeat, _chunkSize)
        featuresProcessed = 0
        while featuresProcessed < numFeat and error == False:
            # upper bound of this chunk ('next' renamed: it shadowed the builtin)
            upper = featuresProcessed + chunk
            features = featurejs['features'][featuresProcessed:upper]
            msg = "Adding features " + str(featuresProcessed) + ":" + str(upper)
            dla.addMessage(msg)
            arcpy.SetProgressorLabel(msg)
            token = dla.getSigninToken()
            params = {'rollbackonfailure': 'true', 'f': 'json', 'token': token, 'features': json.dumps(features)}
            result = dla.sendRequest(url, params)
            # explicit error-key check instead of try/except-KeyError control flow
            if 'error' in result and result['error'] is not None:
                retval = False
                dla.addMessage("Add features to Feature Service failed")
                dla.addMessage(json.dumps(result))
                error = True
            else:
                try:
                    lenAdded = len(result['addResults'])
                    msg = str(lenAdded) + " features added, " + str(featuresProcessed + chunk) + "/" + str(numFeat)
                    print(msg)
                    dla.addMessage(msg)
                    retval = True
                except Exception:
                    # unexpected response shape: report and stop
                    retval = False
                    dla.addMessage("Add features to Feature Service failed")
                    dla.showTraceback()
                    dla.addError(json.dumps(result))
                    error = True
            featuresProcessed += chunk
    except Exception:
        retval = False
        dla.addMessage("Add features to Feature Service failed")
        dla.showTraceback()
        error = True
    return retval
def deleteFeatures(sourceLayer, targelUrl, expr):
    """Delete features from the target service in chunks of _chunkSize.

    sourceLayer -- unused here; kept for signature parity with addFeatures
    targelUrl -- REST url of the target feature layer
    expr -- where clause selecting the features to delete ('' selects all)

    Returns True on success (including nothing to delete), False on failure.
    """
    retval = False
    error = False
    # delete section
    ids = getOIDs(targelUrl, expr)
    try:
        featuresProcessed = 0
        numFeat = len(ids)
        if numFeat == 0:
            dla.addMessage("0 Features to Delete, exiting")
            return True  # nothing to delete is OK
        chunk = min(numFeat, _chunkSize)
        url = targelUrl + '/deleteFeatures'  # loop-invariant, hoisted out of the loop
        arcpy.SetProgressor("default", "Deleting Features")
        while featuresProcessed < numFeat and error == False:
            # upper bound of this chunk ('next' renamed: it shadowed the builtin)
            upper = featuresProcessed + chunk
            msg = "Deleting features " + str(featuresProcessed) + ":" + str(upper)
            dla.addMessage(msg)
            arcpy.SetProgressorLabel(msg)
            oids = ",".join(str(e) for e in ids[featuresProcessed:upper])
            token = dla.getSigninToken()
            params = {'f': 'pjson', 'objectIds': oids, 'token': token}
            result = dla.sendRequest(url, params)
            # explicit error-key check instead of try/except-KeyError control flow
            if 'error' in result and result['error'] is not None:
                retval = False
                dla.addMessage("Delete features from Feature Service failed")
                dla.addMessage(json.dumps(result))
                error = True
            else:
                try:
                    lenDeleted = len(result['deleteResults'])
                    msg = str(lenDeleted) + " features deleted, " + str(featuresProcessed + chunk) + "/" + str(numFeat)
                    print(msg)
                    dla.addMessage(msg)
                    retval = True
                except Exception:
                    # unexpected response shape: report and stop
                    retval = False
                    error = True
                    dla.showTraceback()
                    dla.addMessage("Delete features from Feature Service failed")
                    dla.addError(json.dumps(result))
            featuresProcessed += chunk
    except Exception:
        retval = False
        error = True
        dla.showTraceback()
        dla.addMessage("Delete features from Feature Service failed")
    return retval
def publish(xmlFileNames):
    """Perform the data update processing for each configuration file.

    Called from main or from another script. xmlFileNames is a
    semicolon-separated multi-value string of xml config file paths.
    Mutates the module globals sourceLayer/targetLayer per file and resets
    them to None afterwards so multiple files can be processed in sequence.
    """
    global sourceLayer, targetLayer, _success
    dla._errorCount = 0
    arcpy.SetProgressor("default", "Data Assistant")
    arcpy.SetProgressorLabel("Data Assistant")
    xmlFiles = xmlFileNames.split(";")
    for xmlFile in xmlFiles:  # multi value parameter, loop for each file
        dla.addMessage("Configuration file: " + xmlFile)
        xmlDoc = dla.getXmlDoc(xmlFile)  # parse the xml document
        if xmlDoc == None:
            return
        svceS = False
        svceT = False
        # resolve source/target from the xml only when not already supplied
        if sourceLayer == "" or sourceLayer == None:
            sourceLayer = dla.getNodeValue(xmlDoc, "Source")
            svceS = dla.checkLayerIsService(sourceLayer)
        if targetLayer == "" or targetLayer == None:
            targetLayer = dla.getNodeValue(xmlDoc, "Target")
            svceT = dla.checkLayerIsService(targetLayer)
        dla.addMessage(targetLayer)
        ## Added May2016. warn user if capabilities are not correct, exit if not a valid layer
        if not dla.checkServiceCapabilities(sourceLayer, True):
            return False
        if not dla.checkServiceCapabilities(targetLayer, True):
            return False
        if svceS == True or svceT == True:
            # when signed in get the token and use this. Will be requested
            # many times during the publish
            token = dla.getSigninToken()
            if token == None:
                dla.addError("User must be signed in for this tool to work with services")
                return
        expr = getWhereClause(xmlDoc)
        # replace-by-field-value mode requires a non-empty expression
        if useReplaceSettings == True and (expr == '' or expr == None):
            dla.addError("There must be an expression for replacing by field value, current value = " + str(expr))
            return False
        dla.setWorkspace()
        targetName = dla.getTargetName(xmlDoc)
        # extract the source data to an intermediate feature class in the workspace
        res = dlaExtractLayerToGDB.extract(xmlFile, None, dla.workspace, sourceLayer, targetName)
        if res != True:
            table = dla.getTempTable(targetName)
            msg = "Unable to export data, there is a lock on existing datasets or another unknown error"
            if arcpy.TestSchemaLock(table) != True:
                msg = "Unable to export data, there is a lock on the intermediate feature class: " + table
            dla.addError(msg)
            print(msg)
            return
        else:
            # apply the configured field calculations, then publish the result
            res = dlaFieldCalculator.calculate(xmlFile, dla.workspace, targetName, False)
            if res == True:
                dlaTable = dla.getTempTable(targetName)
                res = doPublish(xmlDoc, dlaTable, targetLayer)
        arcpy.ResetProgressor()
        sourceLayer = None  # set source and target back to None for multiple file processing
        targetLayer = None
        if res == False:
            err = "Data Assistant Update Failed, see messages for details"
            dla.addError(err)
            print(err)