def QueryAllFeatures(self, url, sql, out_fields="*", chunksize=1000, saveLocation="", outName=""):
    """Query all features matching an SQL statement from a hosted feature
    service layer, downloading in chunks to bypass the service record limit.

    Args:
        url (str): The URL of the feature service layer.
        sql (str): The where clause used to select features.
        out_fields (str): Comma-delimited list of fields to return.
            Defaults to ``"*"`` (all fields).
        chunksize (int): Maximum number of features to request per query.
            Capped at the service's ``maxRecordCount``. Defaults to 1000.
        saveLocation (str): Folder to save the result to. If empty, the
            combined FeatureSet is returned instead. Defaults to "".
        outName (str): Output name used together with saveLocation.
            Defaults to "".

    Returns:
        The combined FeatureSet (or the result of saving it), the raw error
        response from the service, or a success/message dict when no
        features matched the query.

    Raises:
        common.ArcRestHelperError: On any unexpected failure.
    """
    fl = None
    try:
        fl = FeatureLayer(url=url, securityHandler=self._securityHandler)
        # First request only the object IDs so the download can be chunked.
        qRes = fl.query(where=sql, returnIDsOnly=True)
        if 'error' in qRes:
            print(qRes)
            return qRes
        elif 'objectIds' in qRes:
            oids = qRes['objectIds']
            total = len(oids)
            if total == 0:
                return {'success': True, 'message': "No features matched the query"}
            print("%s features to be downloaded" % total)
            # Never ask for more records than the service will hand back.
            chunksize = min(chunksize, fl.maxRecordCount)
            combinedResults = None
            totalQueried = 0
            for chunk in chunklist(l=oids, n=chunksize):
                oidsQuery = ",".join(map(str, chunk))
                if not oidsQuery:
                    continue
                results = fl.query(objectIds=oidsQuery,
                                   returnGeometry=True,
                                   out_fields=out_fields)
                if isinstance(results, FeatureSet):
                    # Fold each chunk into one combined FeatureSet.
                    if combinedResults is None:
                        combinedResults = results
                    else:
                        for feature in results.features:
                            combinedResults.features.append(feature)
                    totalQueried += len(results.features)
                    print("{:.0%} Completed: {}/{}".format(
                        totalQueried / float(total), totalQueried, total))
                else:
                    print(results)
            # BUG FIX: previously combinedResults.save was invoked even when
            # every chunk query failed and combinedResults was still None,
            # raising AttributeError instead of returning the (empty) result.
            if saveLocation == "" or outName == "" or combinedResults is None:
                return combinedResults
            else:
                return combinedResults.save(saveLocation=saveLocation,
                                            outName=outName)
        else:
            print(qRes)
    except:
        line, filename, synerror = trace()
        raise common.ArcRestHelperError({
            # BUG FIX: the reported function name was "QueryFeatureLayer",
            # which made tracebacks point at the wrong routine.
            "function": "QueryAllFeatures",
            "line": line,
            "filename": filename,
            "synerror": synerror,
        })
    finally:
        # Drop the layer reference and nudge the collector so connection
        # resources are released promptly.
        fl = None
        del fl
        gc.collect()
def getCurrentGumCount():
    """Fetch the running gum statistics from the service.

    Builds a StatisticFilter computing the count of observations and the
    sum of ``gum_count``, runs it against the layer at the module-level
    ``url``, and logs every returned attribute via arcpy.

    Returns:
        tuple: (total gum count, total observation count, the FeatureLayer
        used for the query, the StatisticFilter that was applied).
    """
    global token
    sum_field = "total_gum_count"
    count_field = "total_observations"
    layer = FeatureLayer(url=url,
                         securityHandler=token.securityhandler,
                         initialize=True)
    stats = StatisticFilter()
    stats.add(statisticType="count", onStatisticField="gum_count",
              outStatisticFieldName=count_field)
    stats.add(statisticType="sum", onStatisticField="gum_count",
              outStatisticFieldName=sum_field)
    response = layer.query(statisticFilter=stats)
    gum_total = 0
    obs_total = 0
    for feature in response.features:
        attributes = feature.asDictionary['attributes']
        for name in attributes:
            # Echo every statistic for the tool's message log.
            arcpy.AddMessage(name + ": " + str(attributes[name]))
            if name == sum_field:
                gum_total = attributes[name]
            if name == count_field:
                obs_total = attributes[name]
    return gum_total, obs_total, layer, stats
def retrieveObservationsForCity():
    """Return all observation features that fall within the "Boundary" layer.

    Reads every row of the local ``Boundary`` feature class; the geometry of
    the last row read is used as the spatial filter for the service query.
    """
    global token
    fc = "Boundary"
    geometry = None
    # Walk the boundary features; the final row's shape wins as the filter.
    for oid, shape in arcpy.da.SearchCursor(fc, ["OID@", "SHAPE@"]):
        print("Feature {}:".format(oid))
        geometry = shape
        print(geometry)
    spatial_filter = GeometryFilter(geomObject=geometry)
    layer = FeatureLayer(url=url,
                         securityHandler=token.securityhandler,
                         initialize=True)
    response = layer.query(out_fields='*',
                           returnGeometry=True,
                           geometryFilter=spatial_filter)
    return response.features
def main(*argv):
    """Main driver: query all features from the layer given in argv[0].

    argv[0] is expected to be the feature layer URL; results are written
    to the tool's messages and to output parameter 1.
    """
    try:
        url = str(argv[0])
        # Authenticate using the current ArcGIS session token.
        arcgisSH = ArcGISTokenSecurityHandler()
        if arcgisSH.valid == False:
            arcpy.AddError(arcgisSH.message)
            return
        fl = FeatureLayer(url=url,
                          securityHandler=arcgisSH,
                          initialize=True)
        # Attribute-only query for every feature in the layer.
        res = fl.query(where="1=1", out_fields='*', returnGeometry=False)
        arcpy.AddMessage(res)
        arcpy.SetParameterAsText(1, str(res))
    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
        arcpy.AddError("ArcPy Error Message: %s" % arcpy.GetMessages(2))
    # NOTE: Python 2 except syntax; this file targets Python 2.
    except FunctionError, f_e:
        messages = f_e.args[0]
        arcpy.AddError("error in function: %s" % messages["function"])
        arcpy.AddError("error on line: %s" % messages["line"])
        arcpy.AddError("error in file name: %s" % messages["filename"])
        arcpy.AddError("with error message: %s" % messages["synerror"])
        arcpy.AddError("ArcPy Error Message: %s" % messages["arc"])
def main(*argv):
    """Main driver: query all features from the layer given in argv[0].

    argv[0] is expected to be the feature layer URL; results are written
    to the tool's messages and to output parameter 1.
    """
    try:
        url = str(argv[0])
        # Authenticate using the current ArcGIS session token.
        arcgisSH = ArcGISTokenSecurityHandler()
        if arcgisSH.valid == False:
            arcpy.AddError(arcgisSH.message)
            return
        fl = FeatureLayer(url=url, securityHandler=arcgisSH, initialize=True)
        # Attribute-only query for every feature in the layer.
        res = fl.query(where="1=1", out_fields='*', returnGeometry=False)
        arcpy.AddMessage(res)
        arcpy.SetParameterAsText(1, str(res))
    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
        arcpy.AddError("ArcPy Error Message: %s" % arcpy.GetMessages(2))
    # NOTE: Python 2 except syntax; this file targets Python 2.
    except FunctionError, f_e:
        messages = f_e.args[0]
        arcpy.AddError("error in function: %s" % messages["function"])
        arcpy.AddError("error on line: %s" % messages["line"])
        arcpy.AddError("error in file name: %s" % messages["filename"])
        arcpy.AddError("with error message: %s" % messages["synerror"])
        arcpy.AddError("ArcPy Error Message: %s" % messages["arc"])
def update_agol(url, fld, sequence_value, interval, seq_format='{}'):
    """Fill in missing id values on features in an AGOL/Portal service.

    Features where ``fld`` is null receive successive formatted sequence
    values starting at ``sequence_value`` and stepping by ``interval``.

    Returns the next unused sequence value on success, or an error string
    describing the connection or update failure.
    """
    # Credentials come from module-level username/password/orgURL settings.
    securityinfo = {
        'security_type': 'Portal',  # LDAP, NTLM, OAuth, Portal, PKI, ArcGIS
        'username': username,
        'password': password,
        'org_url': orgURL,
        'proxy_url': None,
        'proxy_port': None,
        'referer_url': None,
        'token_url': None,
        'certificatefile': None,
        'keyfile': None,
        'client_id': None,
        'secret_id': None,
    }
    shh = securityhandlerhelper.securityhandlerhelper(securityinfo=securityinfo)
    if not shh.valid:
        return 'Could not connect to {}. Please verify paths and credentials.'.format(url)

    layer = FeatureLayer(url=url,
                         securityHandler=shh.securityhandler,
                         proxy_port=None,
                         proxy_url=None,
                         initialize=True)

    # Select only the features that still lack an id value.
    sql = """{} is null""".format(fld)
    out_fields = ['objectid', fld]
    features_missing_id = layer.query(where=sql, out_fields=','.join(out_fields))

    # Stamp each feature with the next formatted sequence value.
    for feature in features_missing_id:
        feature.set_value(fld, seq_format.format(sequence_value))
        sequence_value += interval

    update_results = layer.updateFeature(features=features_missing_id)
    for outcome in update_results['updateResults']:
        if outcome['success'] == False:
            return 'error {}: {}'.format(outcome['error']['code'],
                                         outcome['error']['description'])
    return sequence_value
def retrieveObservations():
    """Download all observation features that have no city assigned.

    Queries the layer at the module-level ``url`` for features where
    ``city = '(none)'``, returning all attributes plus geometry.

    Returns:
        list: The Feature objects returned by the query.
    """
    global token
    fl = FeatureLayer(url=url,
                      securityHandler=token.securityhandler,
                      initialize=True)
    result = fl.query(where="city = '(none)'",
                      out_fields='*',
                      returnGeometry=True)
    # BUG FIX: the original issued a second, argument-less fl.query() whose
    # result was discarded -- a wasted round trip to the service.
    return result.features
def DeleteFeaturesFromFeatureLayer(self, url, sql, chunksize=0):
    """Delete features matching ``sql`` from a hosted feature service layer.

    When chunksize > 0 the object IDs are fetched first and deleted in
    batches; otherwise a single where-clause delete is issued.
    Returns a success/message dict, the raw service response on error,
    or the error response itself.
    """
    fl = None
    try:
        fl = FeatureLayer(url=url, securityHandler=self._securityHandler)
        totalDeleted = 0
        if chunksize > 0:
            # Fetch only IDs so deletion can be batched.
            qRes = fl.query(where=sql, returnIDsOnly=True)
            if "error" in qRes:
                print qRes
                return qRes
            elif "objectIds" in qRes:
                oids = qRes["objectIds"]
                total = len(oids)
                if total == 0:
                    return {"success": "true", "message": "No features matched the query"}
                # NOTE(review): minId/maxId are computed but never used.
                minId = min(oids)
                maxId = max(oids)
                i = 0
                print "%s features to be deleted" % total
                # NOTE(review): if the slice is ever empty (i == len(oids),
                # i.e. total is an exact multiple of chunksize) the `continue`
                # below never advances i and this loop spins forever.
                while i <= len(oids):
                    oidsDelete = ",".join(str(e) for e in oids[i : i + chunksize])
                    if oidsDelete == "":
                        continue
                    else:
                        results = fl.deleteFeatures(objectIds=oidsDelete)
                        if "deleteResults" in results:
                            totalDeleted += len(results["deleteResults"])
                            print "%s%% Completed: %s/%s " % (
                                int(totalDeleted / float(total) * 100),
                                totalDeleted,
                                total,
                            )
                            i += chunksize
                        else:
                            # Service error on this chunk: report what was
                            # deleted so far and stop.
                            print results
                            return {"success": "true", "message": "%s deleted" % totalDeleted}
                # Re-query to sweep up any features still matching the SQL.
                qRes = fl.query(where=sql, returnIDsOnly=True)
                if "objectIds" in qRes:
                    oids = qRes["objectIds"]
                    if len(oids) > 0:
                        print "%s features to be deleted" % len(oids)
                        results = fl.deleteFeatures(where=sql)
                        if "deleteResults" in results:
                            totalDeleted += len(results["deleteResults"])
                            return {"success": "true", "message": "%s deleted" % totalDeleted}
                        else:
                            return results
                return {"success": "true", "message": "%s deleted" % totalDeleted}
            else:
                print qRes
        else:
            # Unchunked path: one delete call with the where clause.
            results = fl.deleteFeatures(where=sql)
            if "deleteResults" in results:
                # NOTE(review): unlike the chunked path, 'message' here is an
                # int (count), not a "%s deleted" string.
                return {"success": "true", "message": totalDeleted + len(results["deleteResults"])}
            else:
                return results
    except:
        line, filename, synerror = trace()
        raise common.ArcRestHelperError(
            {"function": "DeleteFeaturesFromFeatureLayer", "line": line, "filename": filename, "synerror": synerror}
        )
    finally:
        # Release the layer reference and force a collection pass.
        fl = None
        del fl
        gc.collect()
def getTotalGumCount():
    """Summarize gum counts from the service into table_GumSummaryByCity.

    Inserts one "Total" row from an ungrouped statistics query, then one
    row per city from a grouped query. Each row also gets an average
    (sum / observations) in ``avg_gum_count``.
    """
    global token
    gumCountOutField = "total_gum_count"
    observationsCountOutField = "total_observations"
    fl = FeatureLayer(url=url,
                      securityHandler=token.securityhandler,
                      initialize=True)
    # Statistics: observation count and gum-count sum over gum_count.
    statisticFilter = StatisticFilter()
    statisticFilter.add(statisticType="count", onStatisticField="gum_count",
                        outStatisticFieldName=observationsCountOutField)
    statisticFilter.add(statisticType="sum", onStatisticField="gum_count",
                        outStatisticFieldName=gumCountOutField)
    result = fl.query(statisticFilter=statisticFilter)  #groupByFieldsForStatistics="city",
    # with open('D:/Data/Gum/Staging/0_TotalGum.json', 'w') as fp:
    #     json.dump(result.value, fp)
    #Get Total Gum Count
    #cursor = arcpy.UpdateCursor(table_GumSummaryByCity,"City = 'Total'")
    rows = arcpy.InsertCursor(table_GumSummaryByCity)
    for f in result.features:
        row = rows.newRow()
        row.setValue("city", "Total")
        for key in f.asDictionary['attributes']:
            arcpy.AddMessage(key + ": " + str(f.asDictionary['attributes'][key]))
            if key == gumCountOutField:
                gumCount = f.asDictionary['attributes'][key]
            if key == observationsCountOutField:
                obsCount = f.asDictionary['attributes'][key]
            row.setValue(key, f.asDictionary['attributes'][key])
        # NOTE(review): gumCount/obsCount are unbound if the expected keys are
        # missing, and this divides by zero when there are no observations;
        # under Python 2 this is also integer division -- confirm intent.
        row.setValue("avg_gum_count", gumCount / obsCount)
        rows.insertRow(row)
        del row
    #Get Gum Count By City
    result = fl.query(groupByFieldsForStatistics="city",
                      statisticFilter=statisticFilter)
    # with open('D:/Data/Gum/Staging/1_GumByCity.json', 'w') as fp:
    #     json.dump(result.value, fp)
    #rows = arcpy.InsertCursor(table_GumSummaryByCity)
    for f in result.features:
        row = rows.newRow()
        for key in f.asDictionary['attributes']:
            #arcpy.AddMessage(key + ": " + str(f.asDictionary['attributes'][key]))
            if key == gumCountOutField:
                gumCount = f.asDictionary['attributes'][key]
            if key == observationsCountOutField:
                obsCount = f.asDictionary['attributes'][key]
            row.setValue(key, f.asDictionary['attributes'][key])
        # Same division caveats as the "Total" row above.
        row.setValue("avg_gum_count", gumCount / obsCount)
        rows.insertRow(row)
        del row
def downloadData():
    """Download an entire polyline feature service into a new file GDB.

    Pages through the service 1000 object IDs at a time, creates the
    output feature class (schema derived from the first response), inserts
    every record, and writes attribute rows for features whose geometry
    could not be rebuilt to Errors.csv.
    """
    print "beginning data download..."
    proxy_port = None
    proxy_url = None
    agolSH = AGOLTokenSecurityHandler(username=username, password=password)
    fl = FeatureLayer(url=feature_service_url,
                      securityHandler=agolSH,
                      proxy_port=proxy_port,
                      proxy_url=proxy_url,
                      initialize=True)
    # Fetch and sort every object ID so ranges can be paged in order.
    oid_query_response = fl.query(returnIDsOnly=True)
    oid_list = oid_query_response["objectIds"]
    oid_list.sort()
    list_length = len(oid_list)
    print list_length
    # Start clean: remove any prior GDB / error report for this date.
    if os.path.exists(output_folder + os.sep + feature_class_name + "_" + the_date + ".gdb"):
        shutil.rmtree(output_folder + os.sep + feature_class_name + "_" + the_date + ".gdb")
    if os.path.isfile(output_folder + os.sep + "Errors.csv"):
        os.remove(output_folder + os.sep + "Errors.csv")
    arcpy.CreateFileGDB_management(output_folder, feature_class_name + "_" + the_date)
    output_fgdb = output_folder + os.sep + feature_class_name + "_" + the_date + ".gdb"

    def updatedQuery(low, high, trigger):
        # Build the OBJECTID range clause; trigger == 1 means "last page",
        # which uses an open-ended >= clause instead of a half-open range.
        if low != high:
            updated_query = """"OBJECTID" >= """ + str(
                low) + " AND " + """"OBJECTID" < """ + str(high)
            if trigger == 1:
                updated_query = """"OBJECTID" >= """ + str(low)
        else:
            updated_query = """"OBJECTID" = """ + str(low)
        return updated_query

    errors = []
    error_fields = []
    fc = ""
    fields = ["SHAPE@"]
    low = 0
    high = 1000
    counter = 0
    while low <= list_length:
        # NOTE(review): `min`, `max`, and (below) `type` shadow Python
        # builtins within this function.
        min = oid_list[low]
        try:
            max = oid_list[high]
            trigger = 0
        except:
            # Index past the end: clamp to the last OID and flag last page.
            totalFixed = list_length - 1
            max = oid_list[totalFixed]
            trigger = 1
        updated_query = updatedQuery(min, max, trigger)
        returned_data = fl.query(where=updated_query,
                                 out_fields='*',
                                 returnGeometry=True)
        # Round-trip through str/json to get a plain dict of the response.
        returned_data_string = str(returned_data)
        d = json.loads(returned_data_string)
        print "dictionary compiled."
        if counter == 0:
            # First page: create the output feature class and its fields
            # from the response schema.
            wkid = d['spatialReference']['latestWkid']
            sr = arcpy.SpatialReference(wkid)
            arcpy.CreateFeatureclass_management(output_fgdb,
                                                feature_class_name,
                                                "POLYLINE", "", "DISABLED",
                                                "DISABLED", sr)
            fc = output_fgdb + os.sep + feature_class_name
            for field in d['fields']:
                print field["name"]
                error_fields.append(field["name"])
                if field["name"] != "OBJECTID" and field[
                        "name"] != "Shape_Length" and field[
                            "name"] != "GlobalID":
                    text_length = ""
                    # Map esri field types onto arcpy AddField types.
                    if field["type"] == "esriFieldTypeInteger":
                        type = "LONG"
                    elif field["type"] == "esriFieldTypeSmallInteger":
                        type = "SHORT"
                    elif field["type"] == "esriFieldTypeString":
                        type = "TEXT"
                        text_length = field["length"]
                    elif field["type"] == "esriFieldTypeDouble":
                        type = "DOUBLE"
                    elif field["type"] == "esriFieldTypeFloat":
                        type = "FLOAT"
                    elif field["type"] == "esriFieldTypeDate":
                        type = "DATE"
                    arcpy.AddField_management(fc, field["name"], type, "", "",
                                              text_length, field["alias"])
                    fields.append(field["name"])
            # First row of the error report is the header (all field names).
            errors.append(error_fields)
        cursor = arcpy.da.InsertCursor(fc, fields)
        records = d["features"]
        for record in records:
            try:
                # Rebuild the polyline geometry from the JSON paths.
                geom = record["geometry"]
                paths = geom["paths"]
                new_geom = arcpy.Array()
                for part in paths:
                    this_part = arcpy.Array()
                    for point in part:
                        this_point = arcpy.Point(point[0], point[1])
                        this_part.append(this_point)
                    new_geom.append(this_part)
                polyline = arcpy.Polyline(new_geom)
            except:
                # Bad/missing geometry: substitute a placeholder point-line
                # and log the record's attributes for the error report.
                polyline = arcpy.Polyline(
                    arcpy.Array(arcpy.Array(arcpy.Point(0, 0))))
                error_record = []
                for err_fld in error_fields:
                    error_record.append(record["attributes"][err_fld])
                errors.append(error_record)
                print record
            values = [polyline]
            attributes = record["attributes"]
            for field in fields:
                if field != "SHAPE@":
                    values.append(attributes[field])
            cursor.insertRow(values)
        counter += 1
        # NOTE(review): "\\" prints a literal backslash between the counts.
        print str(counter) + "\\" + str(list_length)
        low += 1000
        high += 1000
    # Dump the accumulated error rows (header + failed records).
    no_geom_csv = open(output_folder + os.sep + "Errors.csv", 'wb')
    writer = csv.writer(no_geom_csv)
    writer.writerows(errors)
    no_geom_csv.close()
# Continue populating the connection settings dict (started above this view).
securityinfo['proxy_url'] = proxy_url
securityinfo['proxy_port'] = proxy_port
securityinfo['referer_url'] = None
securityinfo['token_url'] = None
securityinfo['certificatefile'] = None
securityinfo['keyfile'] = None
securityinfo['client_id'] = None
securityinfo['secret_id'] = None
shh = securityhandlerhelper.securityhandlerhelper(securityinfo=securityinfo)
if shh.valid == False:
    print shh.message
else:
    fl = FeatureLayer(url=url,
                      securityHandler=shh.securityhandler,
                      proxy_port=proxy_port,
                      proxy_url=proxy_url,
                      initialize=True)
    # Request objectid plus every field we intend to overwrite.
    # NOTE(review): fieldInfo is presumably a list of dicts with
    # 'FieldName'/'ValueToSet' keys defined earlier in this script -- confirm.
    out_fields = ['objectid']
    for fld in fieldInfo:
        out_fields.append(fld['FieldName'])
    resFeats = fl.query(where=sql, out_fields=",".join(out_fields))
    # Stamp each selected feature with the configured values, then push
    # the whole batch back in one update call.
    for feat in resFeats:
        for fld in fieldInfo:
            feat.set_value(fld["FieldName"], fld['ValueToSet'])
    print fl.updateFeature(features=resFeats)
def DeleteFeaturesFromFeatureLayer(self, url, sql, chunksize=0):
    """Delete features matching an SQL where clause from a hosted feature
    service layer.

    Args:
        url (str): The URL of the feature service layer.
        sql (str): The where clause selecting the features to delete.
        chunksize (int): Maximum number of features to delete per request.
            0 (the default) deletes in a single where-clause call.

    Returns:
        A success/message dict, the raw service error response, or the
        service's delete result when it lacks 'deleteResults'.

    Raises:
        common.ArcRestHelperError: On any unexpected failure.
    """
    fl = None
    try:
        fl = FeatureLayer(url=url, securityHandler=self._securityHandler)
        totalDeleted = 0
        if chunksize > 0:
            # Fetch only the IDs so deletion can be batched.
            qRes = fl.query(where=sql, returnIDsOnly=True)
            if 'error' in qRes:
                print(qRes)
                return qRes
            elif 'objectIds' in qRes:
                oids = qRes['objectIds']
                total = len(oids)
                if total == 0:
                    return {'success': True, 'message': "No features matched the query"}
                i = 0
                print("%s features to be deleted" % total)
                # BUG FIX: the original used `while i <= len(oids)` and
                # `continue`d on an empty chunk without advancing i, which
                # spun forever whenever total was an exact multiple of
                # chunksize.
                while i < len(oids):
                    oidsDelete = ','.join(str(e) for e in oids[i:i + chunksize])
                    if oidsDelete == '':
                        break
                    results = fl.deleteFeatures(objectIds=oidsDelete)
                    if 'deleteResults' in results:
                        totalDeleted += len(results['deleteResults'])
                        print("%s%% Completed: %s/%s " % (
                            int(totalDeleted / float(total) * 100),
                            totalDeleted, total))
                        i += chunksize
                    else:
                        # Service error on this chunk: report progress so far.
                        print(results)
                        return {'success': True, 'message': "%s deleted" % totalDeleted}
                # Sweep: re-query in case features still match the SQL.
                qRes = fl.query(where=sql, returnIDsOnly=True)
                if 'objectIds' in qRes:
                    oids = qRes['objectIds']
                    if len(oids) > 0:
                        print("%s features to be deleted" % len(oids))
                        results = fl.deleteFeatures(where=sql)
                        if 'deleteResults' in results:
                            totalDeleted += len(results['deleteResults'])
                            return {'success': True, 'message': "%s deleted" % totalDeleted}
                        else:
                            return results
                return {'success': True, 'message': "%s deleted" % totalDeleted}
            else:
                print(qRes)
        else:
            # Unchunked path: a single where-clause delete.
            results = fl.deleteFeatures(where=sql)
            if results is not None:
                if 'deleteResults' in results:
                    return {'success': True, 'message': totalDeleted + len(results['deleteResults'])}
                else:
                    return results
    except:
        line, filename, synerror = trace()
        raise common.ArcRestHelperError({
            "function": "DeleteFeaturesFromFeatureLayer",
            "line": line,
            "filename": filename,
            "synerror": synerror,
        })
    finally:
        # Release the layer reference and force a collection pass.
        fl = None
        del fl
        gc.collect()
def DeleteFeaturesFromFeatureLayer(self, url, sql, chunksize=0):
    """Removes features from a hosted feature service layer by SQL query.

    Args:
        url (str): The URL of the feature service layer.
        sql (str): The SQL query to apply against the feature service.
            Those features that satisfy the query will be deleted.
        chunksize (int): The maximum amount of features to remove at a
            time. Defaults to 0 (delete in a single call).
    Returns:
        The result from
        :py:func:`arcrest.agol.services.FeatureLayer.deleteFeatures`,
        or a success/message dict summarizing the chunked deletion.
    Raises:
        common.ArcRestHelperError: On any unexpected failure.
    Notes:
        If you want to delete all features, it is suggested to use the
        SQL query ``"1=1"``.
    """
    fl = None
    try:
        fl = FeatureLayer(url=url, securityHandler=self._securityHandler)
        totalDeleted = 0
        if chunksize > 0:
            # Fetch only the object IDs so the delete can be batched.
            qRes = fl.query(where=sql, returnIDsOnly=True)
            if 'error' in qRes:
                print(qRes)
                return qRes
            elif 'objectIds' in qRes:
                oids = qRes['objectIds']
                total = len(oids)
                if total == 0:
                    return {
                        'success': True,
                        'message': "No features matched the query"
                    }
                i = 0
                print("%s features to be deleted" % total)
                # BUG FIX: the original loop tested `i <= len(oids)` and
                # `continue`d on an empty chunk without advancing i,
                # looping forever when total was a multiple of chunksize.
                while i < len(oids):
                    oidsDelete = ','.join(str(e) for e in oids[i:i + chunksize])
                    if oidsDelete == '':
                        break
                    results = fl.deleteFeatures(objectIds=oidsDelete)
                    if 'deleteResults' in results:
                        totalDeleted += len(results['deleteResults'])
                        print("%s%% Completed: %s/%s " %
                              (int(totalDeleted / float(total) * 100),
                               totalDeleted, total))
                        i += chunksize
                    else:
                        # Error on this chunk: report what was deleted.
                        print(results)
                        return {
                            'success': True,
                            'message': "%s deleted" % totalDeleted
                        }
                # Sweep: re-query in case features still match the SQL.
                qRes = fl.query(where=sql, returnIDsOnly=True)
                if 'objectIds' in qRes:
                    oids = qRes['objectIds']
                    if len(oids) > 0:
                        print("%s features to be deleted" % len(oids))
                        results = fl.deleteFeatures(where=sql)
                        if 'deleteResults' in results:
                            totalDeleted += len(results['deleteResults'])
                            return {
                                'success': True,
                                'message': "%s deleted" % totalDeleted
                            }
                        else:
                            return results
                return {
                    'success': True,
                    'message': "%s deleted" % totalDeleted
                }
            else:
                print(qRes)
        else:
            # Unchunked path: one where-clause delete call.
            results = fl.deleteFeatures(where=sql)
            if results is not None:
                if 'deleteResults' in results:
                    return {
                        'success': True,
                        'message': totalDeleted + len(results['deleteResults'])
                    }
                else:
                    return results
    except:
        line, filename, synerror = trace()
        raise common.ArcRestHelperError({
            "function": "DeleteFeaturesFromFeatureLayer",
            "line": line,
            "filename": filename,
            "synerror": synerror,
        })
    finally:
        # Release the layer reference and force a collection pass.
        fl = None
        del fl
        gc.collect()
def QueryAllFeatures(self, url=None, where="1=1", out_fields="*",
                     timeFilter=None, geometryFilter=None,
                     returnFeatureClass=False, out_fc=None, outSR=None,
                     chunksize=1000, printIndent=""):
    """Performs an SQL query against a hosted feature service layer
    and returns all features regardless of service limit.

    Args:
        url (str): The URL of the feature service layer.
        where: The selection sql statement.
        out_fields: The attribute fields to return.
        timeFilter: A TimeFilter object where either the start time or
            start and end time are defined to limit the search results
            for a given time. The values in the timeFilter should be as
            UTC timestamps in milliseconds. No checking occurs to see if
            they are in the right format.
        geometryFilter: A GeometryFilter object to parse down a given
            query by another spatial dataset.
        returnFeatureClass: Default False. If true, query will be
            returned as feature class.
        out_fc: Only valid if returnFeatureClass is set to True. Output
            location of query.
        outSR: Output spatial reference for the returned geometry.
        chunksize (int): The maximum amount of features to query at a
            time. Defaults to 1000.
        printIndent (str): Prefix prepended to progress messages.

    Returns:
        A FeatureSet of the combined results (default), a path to the
        output featureclass if returnFeatureClass is True, or [] when
        the ID query errors.
    """
    if url is None:
        return
    fl = None
    try:
        fl = FeatureLayer(url=url, securityHandler=self._securityHandler)
        # Request only the IDs first so the download can be chunked.
        qRes = fl.query(where=where, returnIDsOnly=True,
                        timeFilter=timeFilter, geometryFilter=geometryFilter)
        if 'error' in qRes:
            # BUG FIX: qRes is a dict; `printIndent + qRes` raised
            # TypeError instead of reporting the service error.
            print("{}{}".format(printIndent, qRes))
            return []
        elif 'objectIds' in qRes:
            oids = qRes['objectIds']
            total = len(oids)
            if total == 0:
                # Nothing to chunk; a direct query returns the empty set.
                return fl.query(where=where, returnGeometry=True,
                                out_fields=out_fields, timeFilter=timeFilter,
                                geometryFilter=geometryFilter, outSR=outSR)
            print(printIndent + "%s features to be downloaded" % total)
            # Never request more records than the service will return.
            chunksize = min(chunksize, fl.maxRecordCount)
            combinedResults = None
            totalQueried = 0
            for chunk in chunklist(l=oids, n=chunksize):
                oidsQuery = ",".join(map(str, chunk))
                if not oidsQuery:
                    continue
                results = fl.query(objectIds=oidsQuery, returnGeometry=True,
                                   out_fields=out_fields,
                                   timeFilter=timeFilter,
                                   geometryFilter=geometryFilter,
                                   outSR=outSR)
                if isinstance(results, FeatureSet):
                    # Fold each chunk into one combined FeatureSet.
                    if combinedResults is None:
                        combinedResults = results
                    else:
                        for feature in results.features:
                            combinedResults.features.append(feature)
                    totalQueried += len(results.features)
                    print(printIndent + "{:.0%} Completed: {}/{}".format(
                        totalQueried / float(total), totalQueried, total))
                else:
                    # BUG FIX: results may be a dict here; avoid str+dict
                    # concatenation.
                    print("{}{}".format(printIndent, results))
            if returnFeatureClass == True and combinedResults is not None:
                return combinedResults.save(*os.path.split(out_fc))
            else:
                # BUG FIX: previously .save was attempted even when every
                # chunk failed and combinedResults was still None.
                return combinedResults
        else:
            print("{}{}".format(printIndent, qRes))
    except:
        line, filename, synerror = trace()
        raise common.ArcRestHelperError({
            "function": "QueryAllFeatures",
            "line": line,
            "filename": filename,
            "synerror": synerror,
        })
    finally:
        # Release the layer reference and force a collection pass.
        fl = None
        del fl
        gc.collect()
from arcrest.security import AGOLTokenSecurityHandler
from arcrest.agol import FeatureLayer

if __name__ == "__main__":
    # Placeholder credentials / endpoint -- fill in before running.
    username = "******"
    password = "******"
    url = "<URL to Feature Layer>"
    proxy_port = None
    proxy_url = None
    # Authenticate against ArcGIS Online with a token handler.
    agolSH = AGOLTokenSecurityHandler(username=username, password=password)
    fl = FeatureLayer(url=url,
                      securityHandler=agolSH,
                      proxy_port=proxy_port,
                      proxy_url=proxy_url,
                      initialize=True)
    # Print every feature's attributes (no geometry) to stdout.
    print fl.query(where="1=1", out_fields='*', returnGeometry=False)
def main():
    """Export recently adopted assets from the service to a CSV.

    Queries the adoption layer for features whose status changed since the
    last recorded run, writes the selected fields to a CSV, and records
    the current query time for the next run.
    """
    try:
        dateTimeFormat = "%Y/%m/%d %H:%M:%S"  #Date time format of the service, example'2016-04-26 04:00:00'
        #log file to store details
        logFile = r"c:\temp\adoptedAssets.log"
        common.init_log(logFile)
        print("###### Date Extraction Process Started ######")
        username = ""
        password = ""
        proxy_port = None
        proxy_url = None
        agolSH = None
        print("\tStarted at {0}".format(
            datetime.datetime.now().strftime(dateTimeFormat)))
        #Create a authenicated connection to portal (anonymous when blank)
        if username != "":
            agolSH = AGOLTokenSecurityHandler(username=username,
                                              password=password)
            print("\tLogged into the portal")
        #Settings
        url = 'http://services1.arcgis.com/DlnuvLGpDczjeSgG/arcgis/rest/services/CatchBasin/FeatureServer/0/'  #URL to adoption service
        statusField = 'Assetstatus'  #Field with status, used to build SQL
        statusValue = 'Adopted'  #Value to search for in the StatusField
        statusUpdateField = 'Laststatusupdate'  #Field used to restrict query to only records since last query
        out_fields = 'OBJECTID,GIS_ID,Nickname'  #Fields to save to the output CSV
        #The location and file name to save the results to
        saveLocation = r"c:\temp\adoptedAssets.csv"
        #File with the date of the last run, if it does not exist, all features are returned and file is created for next run
        lastRunDetails = r"c:\temp\lastrundate.txt"
        lastQueryDate = None
        #Start building the SQL Query
        sql = statusField + " = '" + statusValue + "'"
        #Open the file with the last run date
        if os.path.isfile(lastRunDetails):
            print("\tLast run file exist")
            with open(lastRunDetails, 'r') as configFile:
                lastQueryDate = configFile.read()
                # NOTE(review): close() is redundant inside `with`.
                configFile.close()
            print("\t\tLast query date: {0}".format(lastQueryDate))
        #If the last query date file was found and value is a date,
        #restrict the query to changes since that date
        if lastQueryDate is not None and validate(
                date_text=lastQueryDate, dateTimeFormat=dateTimeFormat):
            sql = sql + " AND " + statusUpdateField + " >= " + "'" + lastQueryDate + "'"
        #Add current time to query (upper bound of the window)
        queryDate = datetime.datetime.now().strftime(dateTimeFormat)
        sql = sql + " AND " + statusUpdateField + " <= " + "'" + queryDate + "'"
        print("\tSQL: {0}".format(sql))
        #Create a connection to the layer
        fl = FeatureLayer(url=url,
                          securityHandler=agolSH,
                          proxy_port=proxy_port,
                          proxy_url=proxy_url,
                          initialize=True)
        #query the layer
        featureSet = fl.query(where=sql,
                              out_fields=out_fields,
                              returnGeometry=False)
        print("\t{0} feature returned".format(len(featureSet.features)))
        #Create a new output writer; with no results the stale CSV is removed
        if (len(featureSet.features) == 0):
            if os.path.isfile(saveLocation):
                os.remove(saveLocation)
        else:
            with open(saveLocation, "wb+") as csvFile:
                f = csv.writer(csvFile)
                fields = []
                #write the headers to the csv
                for field in featureSet.fields:
                    fields.append(field['name'])
                f.writerow(fields)
                newRow = []
                #Loop through the results and save each to a row
                for feature in featureSet:
                    newRow = []
                    for field in featureSet.fields:
                        newRow.append(feature.get_value(field['name']))
                    f.writerow(newRow)
                # NOTE(review): close() is redundant inside `with`.
                csvFile.close()
            print("\tCSV updated")
        #Update the last run file so the next run starts from queryDate
        with open(lastRunDetails, 'w') as configFile:
            configFile.write(queryDate)
            configFile.close()
        print("\t{0} saved to file".format(queryDate))
        print("\tCompleted at {0}".format(
            datetime.datetime.now().strftime(dateTimeFormat)))
        print("###### Completed ######")
    except:
        line, filename, synerror = trace()
        print("error on line: %s" % line)
        print("error in file name: %s" % filename)
        print("with error message: %s" % synerror)
def main():
    """Scan fields in services looking for explicit/sensitive words (as defined).
    Features are updated if content is found so that a map filter can be used to
    hide this content"""
    filter_log = path.join(sys.path[0], 'filter_log.log')
    with open(filter_log, 'a') as log:
        log.write('\n{}\n'.format(dt.now()))
        try:
            # Build regular expression of explicit words (uppercase formatting)
            badwords = list(set([str(word).upper() for word in wordlist.bad_words]))
            badwordsexact = list(set([str(word).upper() for word in wordlist.bad_words_exact]))
            explicit_filter = ''
            if badwords:
                explicit_filter += build_expression(badwords)
                if badwordsexact:
                    # Combine partial-match and exact-match alternations.
                    explicit_filter += '|{}'.format(build_expression(badwordsexact))
            elif badwordsexact:
                explicit_filter += build_expression(badwordsexact)
            # Build regular expression of sensitive words (uppercase formatting)
            sensitivewords = list(set([str(word).upper() for word in wordlist.sensitive_words]))
            sensitive_filter = build_expression(sensitivewords)
            # get_shh returns an error str/dict on failure -- treat as fatal.
            shh = get_shh()
            if isinstance(shh, str) or isinstance(shh, dict):
                raise Exception(m1.format(wordlist.orgURL))
            # Process each of the services listed above
            for service in wordlist.services:
                try:
                    fl = FeatureLayer(url=service['url'],
                                      securityHandler=shh.securityhandler,
                                      proxy_port=None,
                                      proxy_url=None,
                                      initialize=True)
                except:
                    raise Exception(m1.format(service['url']))
                # Build SQL query to find visible features
                if service['status field']:
                    sql = """{} = '{}' AND {} = '{}'""".format(service['flag field'],
                                                               wordlist.visible_value,
                                                               service['status field'],
                                                               wordlist.status_value)
                else:
                    sql = """{} = '{}'""".format(service['flag field'],
                                                 wordlist.visible_value)
                # Fields that will be returned by query
                out_fields = ['objectid', service['flag field']] + service['fields to scan']
                if service['reason field']:
                    out_fields.append(service['reason field'])
                # Get publicly visible features of the defined status
                resFeats = fl.query(where=sql, out_fields=",".join(out_fields))
                # For each public feature
                for feat in resFeats:
                    explicit_content = False
                    sensitive_content = False
                    # Check each field listed for explicit or sensitive content
                    for field in service['fields to scan']:
                        text = feat.get_value(field)
                        text = text.upper()
                        # Find words that are on the bad words list
                        if explicit_filter:
                            if re.search(explicit_filter, text):
                                explicit_content = True
                                break
                        # Find words that are on the sensitive words list
                        if sensitive_filter:
                            if re.search(sensitive_filter, text):
                                sensitive_content = True
                                break
                    if sensitive_content or explicit_content:
                        if service['reason field']:
                            reason = ''
                            # Get current reason, if any, and append new reason
                            cur_reason = feat.get_value(service['reason field'])
                            if cur_reason:
                                reason += "{} ".format(cur_reason)
                            if explicit_content:
                                reason += m2
                            elif sensitive_content:
                                reason += m3
                            # Update reason
                            feat.set_value(service['reason field'], reason)
                        # Mark feature with hidden value
                        feat.set_value(service['flag field'], wordlist.hidden_value)
                # Commit updates and print status
                status = fl.updateFeature(features=resFeats)
                log.write('{}\n'.format(status))
        except Exception as ex:
            # Any failure is appended to the filter log rather than raised.
            log.write('{}\n'.format(ex))
def QueryAllFeatures(self, url, sql, chunksize=0, saveLocation="", outName=""):
    """Query features matching ``sql`` and save them to disk.

    With chunksize > 0, object IDs are fetched first and downloaded in
    batches, combined, and saved via FeatureSet.save; otherwise a single
    query with returnFeatureClass=True writes directly to
    saveLocation/outName.
    """
    fl = None
    try:
        fl = FeatureLayer(url=url, securityHandler=self._securityHandler)
        totalQueried = 0
        if chunksize > 0:
            # Fetch only the IDs so the download can be batched.
            qRes = fl.query(where=sql, returnIDsOnly=True)
            if 'error' in qRes:
                print qRes
                return qRes
            elif 'objectIds' in qRes:
                oids = qRes['objectIds']
                total = len(oids)
                if total == 0:
                    return {'success': 'true', 'message': "No features matched the query"}
                # NOTE(review): minId/maxId are computed but never used.
                minId = min(oids)
                maxId = max(oids)
                i = 0
                print "%s features to be downloaded" % total
                combinedResults = None
                # NOTE(review): if a chunk is empty (i == len(oids)) the
                # `continue` below never advances i, and a non-FeatureSet
                # response also fails to advance i -- both loop forever.
                while (i <= len(oids)):
                    oidsQuery = ','.join(str(e) for e in oids[i:i + chunksize])
                    if oidsQuery == '':
                        continue
                    else:
                        results = fl.query(objectIds=oidsQuery,
                                           returnGeometry=True,
                                           out_fields='*')
                        if isinstance(results, FeatureSet):
                            # Fold each chunk into one combined FeatureSet.
                            if combinedResults is None:
                                combinedResults = results
                            else:
                                for feature in results.features:
                                    combinedResults.features.append(feature)
                            totalQueried += len(results.features)
                            print "%s%% Completed: %s/%s " % (int(totalQueried / float(total) * 100), totalQueried, total)
                            i += chunksize
                        else:
                            print results
                print combinedResults.save(saveLocation=saveLocation,
                                           outName=outName)
            else:
                print qRes
        else:
            # Unchunked path: let the layer write the feature class itself.
            return fl.query(where=sql,
                            returnFeatureClass=True,
                            returnGeometry=True,
                            out_fields='*',
                            out_fc=os.path.join(saveLocation, outName))
    except:
        line, filename, synerror = trace()
        # NOTE(review): the reported function name does not match this
        # function ("QueryFeatureLayer" vs QueryAllFeatures).
        raise common.ArcRestHelperError({
            "function": "QueryFeatureLayer",
            "line": line,
            "filename": filename,
            "synerror": synerror,
        })
    finally:
        # Release the layer reference and force a collection pass.
        fl = None
        del fl
        gc.collect()
def DeleteFeaturesFromFeatureLayer(self, url, sql, chunksize=0):
    """Removes features from a hosted feature service layer by SQL query.

    Args:
        url (str): The URL of the feature service layer.
        sql (str): The SQL query to apply against the feature service.
            Those features that satisfy the query will be deleted.
        chunksize (int): The maximum amount of features to remove at a time.
            Defaults to 0 (delete everything matching *sql* in one request).

    Returns:
        The result from
        :py:func:`arcrest.agol.services.FeatureLayer.deleteFeatures`.

    Notes:
        If you want to delete all features, it is suggested to use the SQL
        query ``"1=1"``.
    """
    fl = None
    try:
        fl = FeatureLayer(url=url, securityHandler=self._securityHandler)
        totalDeleted = 0
        if chunksize > 0:
            # Page through the matching OBJECTIDs chunk by chunk.
            qRes = fl.query(where=sql, returnIDsOnly=True)
            if 'error' in qRes:
                print (qRes)
                return qRes
            elif 'objectIds' in qRes:
                oids = qRes['objectIds']
                total = len(oids)
                if total == 0:
                    return {'success': True,
                            'message': "No features matched the query"}
                print ("%s features to be deleted" % total)
                i = 0
                # BUG FIX: the loop previously ran while i <= len(oids) and
                # issued `continue` on the empty final ID chunk, which spun
                # forever once i reached len(oids).  Strict comparison keeps
                # every chunk non-empty and guarantees termination.
                while i < total:
                    oidsDelete = ','.join(str(e) for e in oids[i:i + chunksize])
                    results = fl.deleteFeatures(objectIds=oidsDelete)
                    if 'deleteResults' in results:
                        totalDeleted += len(results['deleteResults'])
                        print ("%s%% Completed: %s/%s " % (
                            int(totalDeleted / float(total) * 100),
                            totalDeleted, total))
                        i += chunksize
                    else:
                        # Unexpected payload: report partial progress.
                        print (results)
                        return {'success': True,
                                'message': "%s deleted" % totalDeleted}
                # Sweep for any features that still match the query and
                # remove them in a single where-clause delete.
                qRes = fl.query(where=sql, returnIDsOnly=True)
                if 'objectIds' in qRes:
                    oids = qRes['objectIds']
                    if len(oids) > 0:
                        print ("%s features to be deleted" % len(oids))
                        results = fl.deleteFeatures(where=sql)
                        if 'deleteResults' in results:
                            totalDeleted += len(results['deleteResults'])
                            return {'success': True,
                                    'message': "%s deleted" % totalDeleted}
                        else:
                            return results
                    return {'success': True,
                            'message': "%s deleted" % totalDeleted}
            else:
                print (qRes)
        else:
            results = fl.deleteFeatures(where=sql)
            if results is not None:
                if 'deleteResults' in results:
                    # BUG FIX: this branch previously put a bare integer in
                    # 'message'; now formatted consistently with the chunked
                    # path ("<n> deleted").
                    return {'success': True,
                            'message': "%s deleted" %
                                       (totalDeleted +
                                        len(results['deleteResults']))}
                else:
                    return results
    except:
        line, filename, synerror = trace()
        raise common.ArcRestHelperError({
            "function": "DeleteFeaturesFromFeatureLayer",
            "line": line,
            "filename": filename,
            "synerror": synerror,
        })
    finally:
        fl = None
        del fl
        gc.collect()
from __future__ import print_function
from arcrest.security import AGOLTokenSecurityHandler
from arcrest.agol import FeatureLayer

if __name__ == "__main__":
    # Connection settings: fill in real credentials and the layer URL.
    username = "******"
    password = "******"
    url = "<URL to Feature Layer>"
    proxy_url = None
    proxy_port = None

    # Authenticate against ArcGIS Online, then bind the target layer.
    token_handler = AGOLTokenSecurityHandler(username=username,
                                             password=password)
    layer = FeatureLayer(url=url,
                         securityHandler=token_handler,
                         proxy_port=proxy_port,
                         proxy_url=proxy_url,
                         initialize=True)

    # Dump every attribute of every feature (geometry excluded) to stdout.
    print(layer.query(where="1=1", out_fields='*', returnGeometry=False))
# NOTE(review): this fragment expects `securityinfo`, `url`, `sql` and
# `fieldInfo` to be defined earlier in the script (Python 2 print syntax).
# Authenticate with the settings gathered above; bail out with the
# handler's message if the credentials are rejected.
sec_handle = securityhandlerhelper.securityhandlerhelper(
    securityinfo=securityinfo)
if sec_handle.valid == False:
    print sec_handle.message
else:
    # create a feature layer of the AGOL service
    feature_layer = FeatureLayer(url=url,
                                 securityHandler=sec_handle.securityhandler,
                                 proxy_port=None,
                                 proxy_url=None,
                                 initialize=True)
    # objectid is requested so updateFeature can match the queried rows
    out_fields = ['objectid']
    # append the field info to each field
    for field in fieldInfo:
        out_fields.append(field['FieldName'])
    # query the feature layer
    query_feats = feature_layer.query(where=sql,
                                      out_fields=",".join(out_fields))
    # loop over each feature and field to update
    for feat in query_feats:
        for field in fieldInfo:
            feat.set_value(field["FieldName"], field['ValueToSet'])
    # update features and show the server's per-feature edit results
    print feature_layer.updateFeature(features=query_feats)
# NOTE(review): this fragment expects `username`, `password`, `url` and
# `output` to be defined earlier in the script (Python 2 print syntax).
proxy_port = None
proxy_url = None
agolSH = AGOLTokenSecurityHandler(username=username, password=password)
fl = FeatureLayer(url=url,
                  securityHandler=agolSH,
                  proxy_port=proxy_port,
                  proxy_url=proxy_url,
                  initialize=True)
# Page bounds used when querying the service in batches.
qmin = 0
qmax = 2000
# Fetch only the object IDs to size the download.
count = fl.query(returnIDsOnly=True)
cList = count["objectIds"]
cValue = cList[-1]  # last ID returned (assumes service returns IDs sorted -- TODO confirm)
listLen = len(cList)
print listLen
# Date-stamped name for the output file geodatabase, e.g. GET20240131.gdb.
now = datetime.datetime.now()
curMonth = now.strftime("%m")
curDay = now.strftime("%d")
curYear = now.strftime("%Y")
arcpy.CreateFileGDB_management(output, "GET" + curYear + curMonth + curDay)
werkspace = output + os.sep + "GET" + curYear + curMonth + curDay + ".gdb"
# Template feature class supplying the schema for the new feature class.
sample = "C:\\TxDOT\\Scripts\\javascript\\Guardrail\\Data\\GuardrailPoints.gdb\\GuardrailPoints"
sr = arcpy.SpatialReference(3857)  # EPSG:3857 Web Mercator
arcpy.CreateFeatureclass_management(werkspace, "GuardrailEndTreatments",
                                    "POINT", sample, "DISABLED", "DISABLED",
                                    sr)
def main():
    """Bulk-load Workforce assignments from a CSV file.

    Resolves each CSV row's area name to a service-supplied centroid,
    resolves worker/supervisor names to their Workforce layer OBJECTIDs,
    clears the existing assignments layer, and adds one assignment feature
    per row.

    NOTE(review): connection settings and item IDs below are blank
    placeholders that must be filled in before running.
    """
    try:
        proxy_port = None
        proxy_url = None
        # Portal connection settings.
        securityinfo = {}
        securityinfo['security_type'] = 'Portal'  # LDAP, NTLM, OAuth, Portal, PKI
        securityinfo['username'] = ""  # <UserName>
        securityinfo['password'] = ""  # <Password>
        securityinfo['org_url'] = "http://www.arcgis.com"
        securityinfo['proxy_url'] = proxy_url
        securityinfo['proxy_port'] = proxy_port
        securityinfo['referer_url'] = None
        securityinfo['token_url'] = None
        securityinfo['certificatefile'] = None
        securityinfo['keyfile'] = None
        securityinfo['client_id'] = None
        securityinfo['secret_id'] = None
        workforceProjectID = ''  # Workforce project number
        assignmentAreasID = ''  # ID of service to get centroids from
        assignmentAreaLayerName = ''  # layer in service
        assignmentAreaNameField = ''  # field with name of id area
        csvPath = r".\dataToLookup.csv"  # <Path with data>
        # Column names expected in the CSV.
        workerCol = 'worker'
        areaCol = 'area'
        descriptionCol = "description"
        notesCol = "notes"
        supervisorCol = "supervisor"
        assignmentType = 2
        status = 1
        # Name -> ID lookup tables populated from the services below.
        workerNameToID = {}
        dispatcherNameToID = {}
        areaNameToID = {}
        fst = featureservicetools.featureservicetools(securityinfo)
        if fst.valid == False:
            print(fst.message)
        else:
            portalAdmin = arcrest.manageorg.Administration(
                securityHandler=fst.securityhandler)
            # Get the assignment areas
            fs = fst.GetFeatureService(itemId=assignmentAreasID,
                                       returnURLOnly=False)
            if not fs is None:
                fs_url = fst.GetLayerFromFeatureService(
                    fs=fs,
                    layerName=assignmentAreaLayerName,
                    returnURLOnly=True)
                if not fs_url is None:
                    fl = FeatureLayer(url=fs_url,
                                      securityHandler=fst.securityhandler,
                                      proxy_port=proxy_port,
                                      proxy_url=proxy_url,
                                      initialize=True)
                    # returnCentroid asks the service for polygon centroids
                    # in outSR 102100 (Web Mercator).
                    areaResults = fl.query(
                        **{
                            'where': "1=1",
                            'outSR': '102100',
                            'out_fields': assignmentAreaNameField,
                            'returnGeometry': False,
                            'returnCentroid': True
                        })
                    for area in areaResults:
                        arDict = area.asDictionary
                        areaNameToID[arDict['attributes'][
                            assignmentAreaNameField]] = arDict['centroid']
                    # Get the workers
                    item = portalAdmin.content.getItem(
                        itemId=workforceProjectID)
                    itemData = item.itemData()
                    if 'workers' in itemData:
                        fl = FeatureLayer(url=itemData['workers']['url'],
                                          securityHandler=fst.securityhandler,
                                          proxy_port=proxy_port,
                                          proxy_url=proxy_url,
                                          initialize=True)
                        workersResults = fl.query(where="1=1",
                                                  out_fields='OBJECTID, NAME',
                                                  returnGeometry=False)
                        for worker in workersResults:
                            workerNameToID[worker.get_value(
                                'name')] = worker.get_value('OBJECTID')
                    if 'dispatchers' in itemData:
                        fl = FeatureLayer(url=itemData['dispatchers']['url'],
                                          securityHandler=fst.securityhandler,
                                          proxy_port=proxy_port,
                                          proxy_url=proxy_url,
                                          initialize=True)
                        dispatcherResults = fl.query(
                            where="1=1",
                            out_fields='OBJECTID, NAME',
                            returnGeometry=False)
                        for dispatcher in dispatcherResults:
                            dispatcherNameToID[dispatcher.get_value(
                                'name')] = dispatcher.get_value('OBJECTID')
                    if 'assignments' in itemData:
                        features = []
                        fl = FeatureLayer(url=itemData['assignments']['url'],
                                          securityHandler=fst.securityhandler,
                                          proxy_port=proxy_port,
                                          proxy_url=proxy_url,
                                          initialize=True)
                        # NOTE(review): wipes every existing assignment
                        # before loading the CSV.
                        print(fl.deleteFeatures(where="1=1"))
                        with open(csvPath) as csvfile:
                            reader = UnicodeDictReader(csvfile)
                            for row in reader:
                                # Build the Esri JSON for one assignment:
                                # geometry from the area centroid, attributes
                                # from the CSV row and lookup tables.
                                json_string = {}
                                json_string['geometry'] = {}
                                centroidInfo = areaNameToID[
                                    row[areaCol].strip()]
                                json_string['geometry']['x'] = centroidInfo['x']
                                json_string['geometry']['y'] = centroidInfo['y']
                                json_string['attributes'] = {}
                                json_string['attributes']['workerId'] = \
                                    workerNameToID[row[workerCol].strip()]
                                json_string['attributes']['description'] = \
                                    row[descriptionCol]
                                json_string['attributes']['notes'] = \
                                    row[notesCol]
                                json_string['attributes'][
                                    'assignmentType'] = assignmentType
                                json_string['attributes']['status'] = status
                                json_string['attributes'][
                                    'dispatcherId'] = dispatcherNameToID[
                                        row[supervisorCol].strip()]
                                features.append(
                                    Feature(json_string=json_string))
                        results = fl.addFeature(features=features)
                        if 'error' in results:
                            print("Error in response from server: %s" %
                                  results['error'])
                        else:
                            if results['addResults'] is not None:
                                # Count successes; report each failure.
                                featSucces = 0
                                for result in results['addResults']:
                                    if 'success' in result:
                                        if result['success'] == False:
                                            if 'error' in result:
                                                print("Error info: %s" %
                                                      (result['error']))
                                        else:
                                            featSucces = featSucces + 1
                                print("%s features added to %s" %
                                      (featSucces, fl.name))
                            else:
                                print("0 features added to %s /n result info %s"
                                      % (fl.name, str(results)))
    except (common.ArcRestHelperError) as e:
        print("error in function: %s" % e[0]['function'])
        print("error on line: %s" % e[0]['line'])
        print("error in file name: %s" % e[0]['filename'])
        print("with error message: %s" % e[0]['synerror'])
        if 'arcpyError' in e[0]:
            print("with arcpy message: %s" % e[0]['arcpyError'])
    except:
        line, filename, synerror = trace()
        print("error on line: %s" % line)
        print("error in file name: %s" % filename)
        print("with error message: %s" % synerror)
def QueryAllFeatures(self, url, sql, out_fields="*", chunksize=1000,
                     savePath=None):
    """Performs an SQL query against a hosted feature service layer.

    Downloads the matching features in pages (never exceeding the
    service's ``maxRecordCount``) and combines them into one FeatureSet.

    Args:
        url (str): The URL of the feature service layer.
        sql (str): The SQL query to apply against the feature service.
            Those features that satisfy the query will be returned.
        out_fields (str): A comma delimited list of field names to return.
            Defaults to ``"*"``, i.e., return all fields
        chunksize (int): The maximum amount of features to query at a
            time. Defaults to 1000.
        savePath (str): The full path on disk where the features will be
            saved. Defaults to ``None``.

    Returns:
        When ``savePath`` is not provided (``None``), the result from
        :py:func:`arcrest.agol.services.FeatureLayer.query`.
        When ``savePath`` is provided, the result from
        :py:func:`arcrest.common.general.FeatureSet.save`.
    """
    fl = None
    try:
        fl = FeatureLayer(url=url, securityHandler=self._securityHandler)
        # Fetch only the matching OBJECTIDs so the download can be paged.
        qRes = fl.query(where=sql, returnIDsOnly=True)
        if 'error' in qRes:
            print (qRes)
            return qRes
        elif 'objectIds' in qRes:
            oids = qRes['objectIds']
            total = len(oids)
            if total == 0:
                return {'success': True,
                        'message': "No features matched the query"}
            print ("%s features to be downloaded" % total)
            # Never request more features per page than the service allows.
            chunksize = min(chunksize, fl.maxRecordCount)
            combinedResults = None
            totalQueried = 0
            for chunk in chunklist(l=oids, n=chunksize):
                oidsQuery = ",".join(map(str, chunk))
                if not oidsQuery:
                    continue
                else:
                    results = fl.query(objectIds=oidsQuery,
                                       returnGeometry=True,
                                       out_fields=out_fields)
                    if isinstance(results, FeatureSet):
                        # Merge each page into the first FeatureSet
                        # returned, preserving its schema/metadata.
                        if combinedResults is None:
                            combinedResults = results
                        else:
                            for feature in results.features:
                                combinedResults.features.append(feature)
                        totalQueried += len(results.features)
                        print("{:.0%} Completed: {}/{}".format(
                            totalQueried / float(total), totalQueried,
                            total))
                    else:
                        # Non-FeatureSet payload (e.g. an error dict).
                        print (results)
            if savePath is None or savePath == '':
                return combinedResults
            else:
                # os.path.split -> (saveLocation, outName) positional args
                # for FeatureSet.save.
                return combinedResults.save(*os.path.split(savePath))
        else:
            print (qRes)
    except:
        line, filename, synerror = trace()
        raise common.ArcRestHelperError({
            "function": "QueryFeatureLayer",
            "line": line,
            "filename": filename,
            "synerror": synerror,
        })
    finally:
        fl = None
        del fl
        gc.collect()
def main():
    """Load Workforce assignments from a CSV of worker/area rows.

    Maps area names to service centroids and worker/supervisor names to
    layer OBJECTIDs, deletes all current assignments, then adds one new
    assignment feature per CSV row.

    NOTE(review): the credentials and item IDs below are empty
    placeholders; fill them in before running.
    """
    try:
        proxy_port = None
        proxy_url = None
        # Portal connection settings.
        securityinfo = {}
        securityinfo['security_type'] = 'Portal'  # LDAP, NTLM, OAuth, Portal, PKI
        securityinfo['username'] = ""  # <UserName>
        securityinfo['password'] = ""  # <Password>
        securityinfo['org_url'] = "http://www.arcgis.com"
        securityinfo['proxy_url'] = proxy_url
        securityinfo['proxy_port'] = proxy_port
        securityinfo['referer_url'] = None
        securityinfo['token_url'] = None
        securityinfo['certificatefile'] = None
        securityinfo['keyfile'] = None
        securityinfo['client_id'] = None
        securityinfo['secret_id'] = None
        workforceProjectID = ''  # Workforce project number
        assignmentAreasID = ''  # ID of service to get centroids from
        assignmentAreaLayerName = ''  # layer in service
        assignmentAreaNameField = ''  # field with name of id area
        csvPath = r".\dataToLookup.csv"  # <Path with data>
        # CSV column names.
        workerCol = 'worker'
        areaCol = 'area'
        descriptionCol = "description"
        notesCol = "notes"
        supervisorCol = "supervisor"
        assignmentType = 2
        status = 1
        # Lookup tables filled from the services below.
        workerNameToID = {}
        dispatcherNameToID = {}
        areaNameToID = {}
        fst = featureservicetools.featureservicetools(securityinfo)
        if fst.valid == False:
            print (fst.message)
        else:
            portalAdmin = arcrest.manageorg.Administration(
                securityHandler=fst.securityhandler)
            # Get the assignment areas
            fs = fst.GetFeatureService(itemId=assignmentAreasID,
                                       returnURLOnly=False)
            if not fs is None:
                fs_url = fst.GetLayerFromFeatureService(
                    fs=fs,
                    layerName=assignmentAreaLayerName,
                    returnURLOnly=True)
                if not fs_url is None:
                    fl = FeatureLayer(url=fs_url,
                                      securityHandler=fst.securityhandler,
                                      proxy_port=proxy_port,
                                      proxy_url=proxy_url,
                                      initialize=True)
                    # Polygon centroids requested in outSR 102100.
                    areaResults = fl.query(
                        **{'where': "1=1",
                           'outSR': '102100',
                           'out_fields': assignmentAreaNameField,
                           'returnGeometry': False,
                           'returnCentroid': True})
                    for area in areaResults:
                        arDict = area.asDictionary
                        areaNameToID[arDict['attributes'][
                            assignmentAreaNameField]] = arDict['centroid']
                    # Get the workers
                    item = portalAdmin.content.getItem(
                        itemId=workforceProjectID)
                    itemData = item.itemData()
                    if 'workers' in itemData:
                        fl = FeatureLayer(url=itemData['workers']['url'],
                                          securityHandler=fst.securityhandler,
                                          proxy_port=proxy_port,
                                          proxy_url=proxy_url,
                                          initialize=True)
                        workersResults = fl.query(where="1=1",
                                                  out_fields='OBJECTID, NAME',
                                                  returnGeometry=False)
                        for worker in workersResults:
                            workerNameToID[worker.get_value('name')] = \
                                worker.get_value('OBJECTID')
                    if 'dispatchers' in itemData:
                        fl = FeatureLayer(url=itemData['dispatchers']['url'],
                                          securityHandler=fst.securityhandler,
                                          proxy_port=proxy_port,
                                          proxy_url=proxy_url,
                                          initialize=True)
                        dispatcherResults = fl.query(
                            where="1=1",
                            out_fields='OBJECTID, NAME',
                            returnGeometry=False)
                        for dispatcher in dispatcherResults:
                            dispatcherNameToID[dispatcher.get_value('name')] = \
                                dispatcher.get_value('OBJECTID')
                    if 'assignments' in itemData:
                        features = []
                        fl = FeatureLayer(url=itemData['assignments']['url'],
                                          securityHandler=fst.securityhandler,
                                          proxy_port=proxy_port,
                                          proxy_url=proxy_url,
                                          initialize=True)
                        # NOTE(review): clears all existing assignments
                        # before the CSV is loaded.
                        print(fl.deleteFeatures(where="1=1"))
                        with open(csvPath) as csvfile:
                            reader = UnicodeDictReader(csvfile)
                            for row in reader:
                                # One Esri JSON assignment per CSV row.
                                json_string = {}
                                json_string['geometry'] = {}
                                centroidInfo = areaNameToID[
                                    row[areaCol].strip()]
                                json_string['geometry']['x'] = centroidInfo['x']
                                json_string['geometry']['y'] = centroidInfo['y']
                                json_string['attributes'] = {}
                                json_string['attributes']['workerId'] = \
                                    workerNameToID[row[workerCol].strip()]
                                json_string['attributes']['description'] = \
                                    row[descriptionCol]
                                json_string['attributes']['notes'] = \
                                    row[notesCol]
                                json_string['attributes'][
                                    'assignmentType'] = assignmentType
                                json_string['attributes']['status'] = status
                                json_string['attributes'][
                                    'dispatcherId'] = dispatcherNameToID[
                                        row[supervisorCol].strip()]
                                features.append(
                                    Feature(json_string=json_string))
                        results = fl.addFeature(features=features)
                        if 'error' in results:
                            print ("Error in response from server: %s" %
                                   results['error'])
                        else:
                            if results['addResults'] is not None:
                                # Tally successes, report each failure.
                                featSucces = 0
                                for result in results['addResults']:
                                    if 'success' in result:
                                        if result['success'] == False:
                                            if 'error' in result:
                                                print ("Error info: %s" %
                                                       (result['error']))
                                        else:
                                            featSucces = featSucces + 1
                                print ("%s features added to %s" %
                                       (featSucces, fl.name))
                            else:
                                print ("0 features added to %s /n result info %s"
                                       % (fl.name, str(results)))
    except (common.ArcRestHelperError) as e:
        print ("error in function: %s" % e[0]['function'])
        print ("error on line: %s" % e[0]['line'])
        print ("error in file name: %s" % e[0]['filename'])
        print ("with error message: %s" % e[0]['synerror'])
        if 'arcpyError' in e[0]:
            print ("with arcpy message: %s" % e[0]['arcpyError'])
    except:
        line, filename, synerror = trace()
        print ("error on line: %s" % line)
        print ("error in file name: %s" % filename)
        print ("with error message: %s" % synerror)
def DeleteFeaturesFromFeatureLayer(self, url, sql, chunksize=0):
    """Removes features matching *sql* from a hosted feature service layer.

    Args:
        url (str): The URL of the feature service layer.
        sql (str): SQL where clause; matching features are deleted.
        chunksize (int): Maximum number of features to delete per request.
            Defaults to 0 (one where-clause delete for everything).

    Returns:
        A status dict, or the raw ``deleteFeatures`` result on failure.
    """
    fl = None
    try:
        fl = FeatureLayer(url=url, securityHandler=self._securityHandler)
        totalDeleted = 0
        if chunksize > 0:
            # Page through the matching OBJECTIDs chunk by chunk.
            qRes = fl.query(where=sql, returnIDsOnly=True)
            if 'error' in qRes:
                print (qRes)
                return qRes
            elif 'objectIds' in qRes:
                oids = qRes['objectIds']
                total = len(oids)
                if total == 0:
                    return {'success': 'true',
                            'message': "No features matched the query"}
                print ("%s features to be deleted" % total)
                i = 0
                # BUG FIX: the loop previously ran while i <= len(oids) and
                # issued `continue` on the empty final ID chunk, which spun
                # forever once i reached len(oids).  Strict comparison keeps
                # every chunk non-empty and guarantees termination.
                while i < total:
                    oidsDelete = ','.join(str(e) for e in oids[i:i + chunksize])
                    results = fl.deleteFeatures(objectIds=oidsDelete)
                    if 'deleteResults' in results:
                        totalDeleted += len(results['deleteResults'])
                        print ("%s%% Completed: %s/%s " % (
                            int(totalDeleted / float(total) * 100),
                            totalDeleted, total))
                        i += chunksize
                    else:
                        # Unexpected payload: report partial progress.
                        print (results)
                        return {'success': 'true',
                                'message': "%s deleted" % totalDeleted}
                # Sweep for any features that still match the query and
                # remove them with a single where-clause delete.
                qRes = fl.query(where=sql, returnIDsOnly=True)
                if 'objectIds' in qRes:
                    oids = qRes['objectIds']
                    if len(oids) > 0:
                        print ("%s features to be deleted" % len(oids))
                        results = fl.deleteFeatures(where=sql)
                        if 'deleteResults' in results:
                            totalDeleted += len(results['deleteResults'])
                            return {'success': 'true',
                                    'message': "%s deleted" % totalDeleted}
                        else:
                            return results
                    return {'success': 'true',
                            'message': "%s deleted" % totalDeleted}
            else:
                print (qRes)
        else:
            results = fl.deleteFeatures(where=sql)
            if 'deleteResults' in results:
                # BUG FIX: this branch previously returned a bare integer in
                # 'message'; now formatted consistently with the chunked
                # path ("<n> deleted").
                return {'success': 'true',
                        'message': "%s deleted" %
                                   (totalDeleted +
                                    len(results['deleteResults']))}
            else:
                return results
    except:
        line, filename, synerror = trace()
        raise common.ArcRestHelperError({
            "function": "DeleteFeaturesFromFeatureLayer",
            "line": line,
            "filename": filename,
            "synerror": synerror,
        })
    finally:
        fl = None
        del fl
        gc.collect()
# NOTE(review): this fragment expects `username`, `password`, `url` and
# `output` to be defined earlier in the script; the geodatabase-creation
# calls below are currently commented out (Python 2 print syntax).
proxy_port = None
proxy_url = None
agolSH = AGOLTokenSecurityHandler(username=username, password=password)
fl = FeatureLayer(url=url,
                  securityHandler=agolSH,
                  proxy_port=proxy_port,
                  proxy_url=proxy_url,
                  initialize=True)
# Page bounds used when querying the service in batches.
qmin = 0
qmax = 2000
# Fetch only the object IDs to size the download.
count = fl.query(returnIDsOnly=True)
cList = count["objectIds"]
cValue = cList[-1]  # last ID returned (assumes service returns IDs sorted -- TODO confirm)
listLen = len(cList)
print listLen
# Date-stamped name for the output file geodatabase, e.g. GET20240131.gdb.
now = datetime.datetime.now()
curMonth = now.strftime("%m")
curDay = now.strftime("%d")
curYear = now.strftime("%Y")
# arcpy.CreateFileGDB_management(output, "GET" + curYear + curMonth + curDay)
werkspace = output + os.sep + "GET" + curYear + curMonth + curDay + ".gdb"
# sample = "C:\\TxDOT\\Scripts\\javascript\\Guardrail\\Data\\GuardrailPoints.gdb\\GuardrailPoints"
# sr = arcpy.SpatialReference(3857)
# arcpy.CreateFeatureclass_management(werkspace, "GuardrailEndTreatments", "POINT", sample, "DISABLED", "DISABLED", sr)
# NOTE(review): this fragment expects `securityinfo`, `proxy_port`,
# `proxy_url`, `url`, `sql` and `fieldInfo` to be defined earlier in the
# script (Python 2 print syntax).
# Remaining security settings; unused auth schemes are left as None.
securityinfo['proxy_port'] = proxy_port
securityinfo['referer_url'] = None
securityinfo['token_url'] = None
securityinfo['certificatefile'] = None
securityinfo['keyfile'] = None
securityinfo['client_id'] = None
securityinfo['secret_id'] = None
shh = securityhandlerhelper.securityhandlerhelper(securityinfo=securityinfo)
if shh.valid == False:
    print shh.message
else:
    fl = FeatureLayer(url=url,
                      securityHandler=shh.securityhandler,
                      proxy_port=proxy_port,
                      proxy_url=proxy_url,
                      initialize=True)
    # objectid is requested so updateFeature can match the queried rows.
    out_fields = ['objectid']
    for fld in fieldInfo:
        out_fields.append(fld['FieldName'])
    resFeats = fl.query(where=sql, out_fields=",".join(out_fields))
    # Stamp every configured field with its value on every queried feature.
    for feat in resFeats:
        for fld in fieldInfo:
            feat.set_value(fld["FieldName"], fld['ValueToSet'])
    # Push the edits back to the service and show the per-feature results.
    print fl.updateFeature(features=resFeats)
def downloadData():
    """Download a polyline feature service layer into a file geodatabase.

    Pages through the service 1000 OBJECTIDs at a time, creates the output
    feature class on the first page (schema mirrored from the service's
    field list), rebuilds each record's polyline geometry, and writes any
    records whose geometry could not be rebuilt to Errors.csv.

    NOTE(review): relies on module-level names (`username`, `password`,
    `feature_service_url`, `output_folder`, `feature_class_name`,
    `the_date`) defined elsewhere in the script.
    """
    print ("beginning data download...")
    proxy_port = None
    proxy_url = None
    agolSH = AGOLTokenSecurityHandler(username=username, password=password)
    fl = FeatureLayer(url=feature_service_url,
                      securityHandler=agolSH,
                      proxy_port=proxy_port,
                      proxy_url=proxy_url,
                      initialize=True)
    # Fetch every OBJECTID up front so the download can be paged.
    oid_query_response = fl.query(returnIDsOnly=True)
    oid_list = oid_query_response["objectIds"]
    oid_list.sort()
    list_length = len(oid_list)
    print (list_length)
    # Start from a clean output workspace and error log.
    if os.path.exists(output_folder + os.sep + feature_class_name + "_" + the_date + ".gdb"):
        shutil.rmtree(output_folder + os.sep + feature_class_name + "_" + the_date + ".gdb")
    if os.path.isfile(output_folder + os.sep + "Errors.csv"):
        os.remove(output_folder + os.sep + "Errors.csv")
    arcpy.CreateFileGDB_management(output_folder, feature_class_name + "_" + the_date)
    output_fgdb = output_folder + os.sep + feature_class_name + "_" + the_date + ".gdb"

    def updatedQuery(low, high, trigger):
        # Build the OBJECTID range clause for one page; trigger == 1 marks
        # the final page, which is open-ended so the last ID is included.
        if low != high:
            updated_query = """"OBJECTID" >= """ + str(low) + " AND " + """"OBJECTID" < """ + str(high)
            if trigger == 1:
                updated_query = """"OBJECTID" >= """ + str(low)
        else:
            updated_query = """"OBJECTID" = """ + str(low)
        return updated_query

    errors = []
    error_fields = []
    fc = ""
    fields = ["SHAPE@"]
    low = 0
    high = 1000
    counter = 0
    # BUG FIX: this loop previously ran while low <= list_length, which
    # raised IndexError on oid_list[low] whenever the feature count was an
    # exact multiple of 1000.  Strict comparison stops after the last page.
    while low < list_length:
        # NOTE: min/max intentionally mirror the original local names even
        # though they shadow the builtins (not used in this scope).
        min = oid_list[low]
        try:
            max = oid_list[high]
            trigger = 0
        except:
            # Fewer than a full page remains: clamp to the final OBJECTID.
            totalFixed = list_length - 1
            max = oid_list[totalFixed]
            trigger = 1
        updated_query = updatedQuery(min, max, trigger)
        returned_data = fl.query(where=updated_query, out_fields='*', returnGeometry=True)
        # Round-trip through str/json to obtain a plain dict of the page.
        returned_data_string = str(returned_data)
        d = json.loads(returned_data_string)
        print ("dictionary compiled.")
        if counter == 0:
            # First page: create the feature class and mirror the service's
            # schema, skipping OBJECTID/Shape_Length/GlobalID.
            wkid = d['spatialReference']['latestWkid']
            sr = arcpy.SpatialReference(wkid)
            arcpy.CreateFeatureclass_management(output_fgdb, feature_class_name, "POLYLINE", "", "DISABLED", "DISABLED", sr)
            fc = output_fgdb + os.sep + feature_class_name
            for field in d['fields']:
                print (field["name"])
                error_fields.append(field["name"])
                if field["name"] != "OBJECTID" and field["name"] != "Shape_Length" and field["name"] != "GlobalID":
                    text_length = ""
                    if field["type"] == "esriFieldTypeInteger":
                        type = "LONG"
                    elif field["type"] == "esriFieldTypeSmallInteger":
                        type = "SHORT"
                    elif field["type"] == "esriFieldTypeString":
                        type = "TEXT"
                        text_length = field["length"]
                    elif field["type"] == "esriFieldTypeDouble":
                        type = "DOUBLE"
                    elif field["type"] == "esriFieldTypeFloat":
                        type = "FLOAT"
                    elif field["type"] == "esriFieldTypeDate":
                        type = "DATE"
                    arcpy.AddField_management(fc, field["name"], type, "", "", text_length, field["alias"])
                    fields.append(field["name"])
            # First row of the error log is the header of field names.
            errors.append(error_fields)
        cursor = arcpy.da.InsertCursor(fc, fields)
        records = d["features"]
        for record in records:
            try:
                # Rebuild the polyline from the JSON path arrays.
                geom = record["geometry"]
                paths = geom["paths"]
                new_geom = arcpy.Array()
                for part in paths:
                    this_part = arcpy.Array()
                    for point in part:
                        this_point = arcpy.Point(point[0], point[1])
                        this_part.append(this_point)
                    new_geom.append(this_part)
                polyline = arcpy.Polyline(new_geom)
            except:
                # Geometry missing/corrupt: insert a placeholder at (0, 0)
                # and log the record's attributes to the error report.
                polyline = arcpy.Polyline(arcpy.Array(arcpy.Array(arcpy.Point(0, 0))))
                error_record = []
                for err_fld in error_fields:
                    error_record.append(record["attributes"][err_fld])
                errors.append(error_record)
                print (record)
            values = [polyline]
            attributes = record["attributes"]
            for field in fields:
                if field != "SHAPE@":
                    values.append(attributes[field])
            cursor.insertRow(values)
        counter += 1
        print (str(counter) + "\\" + str(list_length))
        low += 1000
        high += 1000
    # 'wb' avoids the blank-line-per-row issue of Python 2's csv module on
    # Windows.
    no_geom_csv = open(output_folder + os.sep + "Errors.csv", 'wb')
    writer = csv.writer(no_geom_csv)
    writer.writerows(errors)
    no_geom_csv.close()