def treatJSON(file, crs):
    # Split an Esri JSON file into separate point, line and polygon inputs.
    with open(file) as f:
        dico = json.load(f)

    # Build three independent copies of the JSON structure, each with its own
    # empty feature list (sharing one dict here would mix the geometry types
    # and clear dico["features"] before it is iterated).
    base_points = dict(dico)
    base_points["features"] = []
    base_lines = dict(dico)
    base_lines["features"] = []
    base_polygons = dict(dico)
    base_polygons["features"] = []

    for feat in dico["features"]:
        geom = feat["geometry"]
        if "x" in geom:
            base_points["features"].append(feat)
        elif "paths" in geom:
            base_lines["features"].append(feat)
        elif "rings" in geom:
            base_polygons["features"].append(feat)

    # Note: JSONToFeatures expects a .json file on disk, so each non-empty
    # dictionary would normally be dumped to a temporary file before the call.
    results = [None, None, None]
    if len(base_points["features"]) > 0:
        arcpy.JSONToFeatures_conversion(base_points, r"in_memory\Points")
        results[0] = r"in_memory\Points"
    if len(base_lines["features"]) > 0:
        arcpy.JSONToFeatures_conversion(base_lines, r"in_memory\Lines")
        results[1] = r"in_memory\Lines"
    if len(base_polygons["features"]) > 0:
        arcpy.JSONToFeatures_conversion(base_polygons, r"in_memory\Polygons")
        results[2] = r"in_memory\Polygons"
    return results
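# Hedged sketch (not part of the original example): JSONToFeatures expects a
# .json file on disk, so one way to feed it the split dictionaries above is to
# dump each one to a temporary file first. The helper name and temp-file
# handling are assumptions, not the original author's code.
import json
import os
import tempfile

import arcpy


def dump_and_convert(feature_dict, out_fc):
    """Write an Esri JSON dict to a temp file and convert it to a feature class."""
    handle, json_path = tempfile.mkstemp(suffix=".json")
    with os.fdopen(handle, "w") as tmp:
        json.dump(feature_dict, tmp)
    arcpy.JSONToFeatures_conversion(json_path, out_fc)
    os.remove(json_path)
    return out_fc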
def count_points_in_poly(url_points, url_poly):
    ## download point data and convert it to a shapefile
    urllib.request.urlretrieve(url_points, 'points.geojson')
    arcpy.JSONToFeatures_conversion('points.geojson', os.path.join('points.shp'), 'POINT')

    urllib.request.urlretrieve(url_poly, 'poly.geojson')
    ## convert downloaded file to shapefile
    arcpy.JSONToFeatures_conversion('poly.geojson', os.path.join('poly.shp'))

    arcpy.SpatialJoin_analysis("poly", "points", "in_memory/points_SpatialJoin")
    arcpy.Delete_management('poly')
    arcpy.Delete_management('points')
def createFeatures(self):
    for j in jsonFiles:
        name = j.split(".")[-2]
        print name
        jtmp = arcpy.JSONToFeatures_conversion(
            os.path.join(JSONS, j), os.path.join(self.scratch, name))
        if j.split('_')[0] in ["GPT", "GPL", "GPO"]:
            ft = arcpy.CopyFeatures_management(
                os.path.join(self.scratch, name),
                os.path.join(self.pathGdb, name))
        elif j.split('_')[0] == "TB":
            tb = arcpy.JSONToFeatures_conversion(
                os.path.join(JSONS, j),
                os.path.join(self.scratch, 'a{:.5}'.format(str(uuid.uuid4()))))
            arcpy.TableToTable_conversion(tb, self.gdb, name)
def JsonToShp(fileRoot, item, type, timeItem):
    arcpy.JSONToFeatures_conversion(
        fileRoot + '\\' + 'json' + '_' + str(resolution) + '\\' +
        type + '_' + timeItem + '_' + str(resolution) + '\\' + item,
        fileRoot + '\\' + 'shapefile' + '_' + str(resolution) + '\\' +
        type + '_' + timeItem + '_' + str(resolution) + '\\' +
        item.split('.')[0] + '_' + str(resolution) + '.shp')
def by_json(ids, base_URL, date, output_fc, batch_size):
    '''
    Gets features by loading everything into memory, then writing it all out
    to a JSON file, then using arcpy to load JSON into a feature class.
    2x faster than using feature sets, but uses lots of memory (tens of gb on
    the SL County Parcels feature set).
    '''
    # dictionary to hold JSON representation of all downloaded features
    features_json = {}
    total_count = len(ids)
    num_batches = -(-total_count // batch_size)  # ceiling division
    batch = 1

    for i in range(0, total_count, batch_size):
        # Using "where oid >= x and oid <= y" (get all features between x and y
        # inclusive), set x to be oid[i], y to be oid[i + (batch_size-1)] to
        # make sure we don't get any duplicates
        i_plus = i + batch_size - 1  # batch end index
        if i_plus < total_count:  # boundary check on last batch's end index
            begin_oid = ids[i]
            end_oid = ids[i_plus]
        else:  # if last batch index goes out of bounds, just use last oid
            begin_oid = ids[i]
            end_oid = ids[-1]

        # Create query, download features as json
        query = ("query?where=OBJECTID%20%3E={}%20and%20OBJECTID%20%3C=%20{}"
                 "&outFields=*&returnGeometry=true&f=json").format(begin_oid, end_oid)
        features_url = base_URL + query
        percent = float(batch - 1) / float(num_batches) * 100.0
        print("Downloading {} to {} ({} of {}, {:.1f}%)".format(
            begin_oid, end_oid, batch, num_batches, percent))
        batch_json = get_json(features_url)

        # If the dictionary's structure already exists, just extend the features
        # list. Otherwise, deepcopy the downloaded dict to features_json.
        # We can do this because the other information about the feature set
        # is the same from batch to batch.
        if 'features' in features_json.keys():
            features_json['features'].extend(batch_json['features'])
        else:
            features_json = copy.deepcopy(batch_json)
        batch += 1

    # Write json out to text file then import it to feature class using arcpy
    print("Creating single JSON string...")
    json_fname = "json_{}.json".format(date)
    json_output = os.path.join(tempfile.gettempdir(), json_fname)
    json_string = json.dumps(features_json)
    del features_json  # Delete feature dictionary to save lots of memory

    print("Writing JSON to temp file...")
    with open(json_output, 'a') as jfile:
        jfile.write(json_string)
    del json_string  # Delete feature string to save lots of memory

    print("Converting JSON to feature class...")
    arcpy.JSONToFeatures_conversion(json_output, output_fc)

    # Clean up after ourselves
    os.remove(json_output)
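# Hedged sketch (assumption): by_json() relies on a get_json() helper that is
# not shown in this example. A minimal standard-library version might look
# like this; the retry count and pause are illustrative.
import json
import time
import urllib.request


def get_json(url, retries=3):
    """Download a URL and return the parsed JSON response."""
    for attempt in range(retries):
        try:
            with urllib.request.urlopen(url) as response:
                return json.loads(response.read().decode("utf-8"))
        except Exception:
            if attempt == retries - 1:
                raise
            time.sleep(2)  # brief pause before retrying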
def downloadRestFeatures(url, queryLayer, query, outName):
    """
    #https://gis.stackexchange.com/questions/324513/converting-rest-service-to-file-geodatabase-feature-class
    can export a map service to fc
    optionally add a query or selection layer to limit output
    or leave those as "" to get the whole thing
    returns the new fc
    """
    # the parameters for the map service query
    if not query:
        query = '1=1'
    params = {
        'where': query,
        'outFields': '*',
        'f': 'pjson',
        'returnGeometry': True
    }

    if queryLayer:
        spatial_ref = arcpy.Describe(queryLayer).spatialReference
        dissolved = arcpy.Dissolve_management(queryLayer, "dissolved")
        arcpy.AddGeometryAttributes_management(dissolved, "EXTENT")
        with arcpy.da.SearchCursor(
                dissolved,
                ["OID@", "EXT_MIN_X", "EXT_MIN_Y", "EXT_MAX_X", "EXT_MAX_Y"]) as sCur:
            for row in sCur:
                minX, minY, maxX, maxY = row[1], row[2], row[3], row[4]
        extent = (str(minX) + "," + str(minY) + "," + str(maxX) + "," + str(maxY))
        params = {
            'where': query,
            'geometry': extent,
            'geometryType': 'esriGeometryEnvelope ',
            'inSR': spatial_ref,
            'spatialRel': 'esriSpatialRelIntersects',
            'outFields': '*',
            'f': 'pjson',
            'returnGeometry': True
        }

    # making the request
    r = requests.get(url, params)

    # read the data from the request to a json and write it to a file in a
    # temp directory
    data = r.json()
    dirpath = tempfile.mkdtemp()
    json_path = r"{}\mapService.json".format(dirpath)
    with open(json_path, 'w') as f:
        json.dump(data, f)
        f.close()
    r.close()

    # turn that json into a feature class!
    arcpy.JSONToFeatures_conversion(json_path, outName)
    shutil.rmtree(dirpath)
    return outName
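# Hedged usage sketch (not from the original script): the service URL and the
# output path below are placeholders; the URL is presumably the layer's /query
# endpoint so the request parameters above apply.
layer_query_url = "https://example.com/arcgis/rest/services/Parcels/MapServer/0/query"
fc = downloadRestFeatures(layer_query_url, "", "", r"C:\temp\scratch.gdb\parcels")
print(arcpy.GetCount_management(fc)[0])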
def json2shp(url_string, filename):
    response = urlopen(url_string)
    data = response.read()
    txt_str = data.decode('utf-8')
    lines = txt_str.split("\r\n")
    file = path + '/' + filename + '.json'
    fx = open(file, "w")
    for line in lines:
        fx.write(line + "\n")
    fx.close()
    arcpy.JSONToFeatures_conversion(file, filename)
def add_layer_to_map_from_json(self, in_json, feature_name, map_obj, position='AUTO_ARRANGE'):
    try:
        feature_name = self.simplify_layer_name(feature_name)
        json_file_url = os.sep.join([self.base_utils.root_dir, 'json',
                                     '{}.json'.format(feature_name)])
        with open(json_file_url, 'w') as outfile:
            json.dump(in_json, outfile)
        shp = self.get_local_source_by_name(feature_name)
        arcpy.JSONToFeatures_conversion(json_file_url, shp)
        return self.make_lyrx_from_feature(shp, feature_name, map_obj, position)
    except (RuntimeError, TypeError, ValueError):
        self.base_utils.add_warning_statement('WARNING: Could not add layer to map from JSON')
        return None
def defQueryExtractRequests(idMin, idMax):
    myQuery = "&where=objectid+>%3D+" + idMin + "+and+objectid+<%3D+" + idMax
    myParams = "query?geometryType=esriGeometryEnvelope&spatialRel=esriSpatialRelIntersects&relationParam=&outFields=*&returnGeometry=true&geometryPrecision=&outSR=&returnIdsOnly=false&returnCountOnly=false&orderByFields=&groupByFieldsForStatistics=&returnZ=false&returnM=false&returnDistinctValues=false&returnTrueCurves=false&f=pjson"
    myRequest = serviceURL + serviceMap + "/" + str(serviceLayerID) + "/" + myParams + myQuery
    response = urllib2.urlopen(myRequest)
    myJSON = response.read()

    # Write response to json text file
    foo = open(dataOutputName + idMin + ".json", "w+")
    foo.write(myJSON)
    foo.close()

    # Create Feature Class
    arcpy.JSONToFeatures_conversion(dataOutputName + idMin + ".json",
                                    ws + dataOutputName + idMin + ".shp")
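# Hedged sketch (not from the original script): defQueryExtractRequests()
# takes string OID bounds, so a driver loop might walk the service in fixed
# size chunks like this. The 1000-record step and 50000 ceiling are
# illustrative assumptions, not values from the source.
step = 1000
for start in range(0, 50000, step):
    defQueryExtractRequests(str(start), str(start + step - 1))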
def export_features_single(self, org_id, name, save_loc):
    """Export a single feature layer from ArcGIS Online and save it in a File Geodatabase"""
    service = ''.join(e for e in name if e.isalnum())
    gdb_name = 'export_' + service[:25] + '.gdb'
    service_folder = os.path.join(save_loc, name)
    os.makedirs(service_folder)
    json_folder = os.path.join(service_folder, 'json')
    os.makedirs(json_folder)
    arcpy.CreateFileGDB_management(service_folder, gdb_name, 'CURRENT')

    params = dict(token=self.token['token'], f='json')
    params_query = dict(token=self.token['token'], f='json', outFields='*',
                        where='1=1', returnGeometry='true')
    actual_name = name.replace(" ", "%20")
    url = self.services + '/{}/arcgis/rest/services/{}/FeatureServer'.format(
        org_id, actual_name)
    item = self.make_request(url, params)

    for layer in item['layers']:
        layer_item_url = self.services + '/{}/arcgis/rest/services/{}/FeatureServer/{}/query'.format(
            org_id, actual_name, layer['id'])
        layer_item = self.make_request(layer_item_url, params_query)
        if 'error' in layer_item:
            # report the error returned for this layer (the original
            # referenced an undefined 'items' variable here)
            raise Exception(layer_item['error']['message'],
                            layer_item['error']['details'])
        else:
            json_source = os.path.join(json_folder, layer['name'] + ".json")
            with open(json_source, 'w') as fp:
                json_file = json.dump(obj=layer_item, fp=fp, indent=4)
            feature_class_name = ''.join(e for e in layer['name'][:25] if e.isalnum())
            output = os.path.join(service_folder, gdb_name, feature_class_name)
            arcpy.JSONToFeatures_conversion(json_source, output)
    return
def getBoundaryLine(boundary, Path, objectName, extra=""): boundarys = json.loads(lineTemplate) boundarys['features'] = [] i = 0 for wayID in boundary: wayXML = GetUrl.getUrl(wayID) if (wayXML == ""): continue wayTree = ET.fromstring(wayXML) wayNode = [] nodes = {} # Initial boundarys list boundarys['features'].append({}) boundarys['features'][i]['attributes'] = ({}) boundarys['features'][i]['attributes']['FID'] = str(i) boundarys['features'][i]['attributes']['wayID'] = str(wayID) boundarys['features'][i]['geometry'] = ({}) boundarys['features'][i]['geometry']['paths'] = [] # Get lon & lat of all nodes for node in wayTree.findall('node'): nodes[node.get('id')] = [float(node.get('lon')), float(node.get('lat'))] # Get the order of nodes in way for nd in wayTree.find('way').findall('nd'): wayNode.append(nodes[nd.get('ref')]) boundarys['features'][i]['geometry']['paths'].append(wayNode) i = i + 1 # write boundary in Json jsObj = json.dumps(boundarys) fileObject = open(Path + objectName + extra + '.json', 'w') fileObject.write(jsObj) fileObject.close() if (os.path.exists(Path + objectName + extra + '.shp') == False): arcpy.JSONToFeatures_conversion(in_json_file=objectName + extra + '.json', out_features=objectName + extra + '.shp') if (os.path.exists(Path + objectName + extra + '_polygon.shp') == False): print(Path + objectName + extra + '_polygon.shp') arcpy.FeatureToPolygon_management(in_features=objectName + extra + '.shp', out_feature_class=objectName + extra + '_polygon.shp')
def createLayerFromAPI(url, name, OutputPath):
    outputLocation = OutputPath + "\\" + name
    response = urllib.request.urlopen(url)
    path_parent = os.path.dirname(arcpy.env.workspace)
    data = json.loads(response.read())
    path = outputLocation + ".json"
    with open(path, 'w+') as f:
        json.dump(data, f, indent=2)

    outputLayer = outputLocation + ".shp"
    arcpy.JSONToFeatures_conversion(path, outputLayer, "POINT")

    path_to_layer = OutputPath + "\\" + name + "_layer.lyr"
    arcpy.MakeFeatureLayer_management(outputLayer, name)
    newlayer = arcpy.SaveToLayerFile_management(name, path_to_layer, "ABSOLUTE")

    p = arcpy.mp.ArcGISProject("CURRENT")
    m = p.activeMap
    lyr = m.addDataFromPath(newlayer)
    getLayerStyle(name, lyr)
    lyr.visible = True
print 'OIDs ' + str(e[0]) + ' through ' + str(e[len(e) - 1])

#create FGDB for output
if not arcpy.Exists(os.path.join(chunkGDBPath, chunkGDBName)):
    arcpy.CreateFileGDB_management(chunkGDBPath, chunkGDBName, "10.0")
    if DEBUG:
        print "Created ESRI GDB " + os.path.join(chunkGDBPath, chunkGDBName)

#convert chunk of JSON into shapefile using arcpy JSON conversion tool
chunkGDBFCName = chunkGDBFCBase + str(e[0]) + '_' + str(e[len(e) - 1])
try:
    #catch if JSON conversion fails. Should probably try the download at least once more
    arcpy.JSONToFeatures_conversion(
        os.path.join(responseFilePath, responseFileName),
        os.path.join(chunkGDBPath, chunkGDBName, chunkGDBFCName))
except arcpy.ExecuteError:
    print "Parsing JSON failed for chunk with OIDs " + str(i) + " to " + str(
        i + chunkLength - 1) + " : " + traceback.format_exc()

#create an output file and append each chunk shapefile to it
#if the output file doesn't exist, create it using the first chunk
if not arcpy.Exists(os.path.join(chunkGDBPath, chunkGDBName, mergeGDBFCName)):
    arcpy.Copy_management(
        os.path.join(chunkGDBPath, chunkGDBName, chunkGDBFCName),
        os.path.join(chunkGDBPath, chunkGDBName, mergeGDBFCName))
else:
    try:
import urllib, json, arcpy, os

userName = "******"
password = "******"
ipAddress = "YOUR_IP_ADDRESS"
arcpy.env.workspace = "YOUR_WORKSPACE"
GIS_SERVER_generateToken_URL = "YOUR_GIS_SERVER_generateToken_URL"
LAYER_QUERY_URL = "YOUR_LAYER_QUERY_URL"
output_name = "YOUR_OUTPUT_NAME"

#Get the access token from the GIS server
tokenURL = (GIS_SERVER_generateToken_URL + "tokens/generateToken?username=" + userName +
            "&password=" + password + "&f=json&ip=" + ipAddress)
token_response = urllib.urlopen(tokenURL)
token_data = json.loads(token_response.read())
token = token_data['token']

url = LAYER_QUERY_URL + "&token=" + token
response = urllib.urlopen(url)
data = json.loads(response.read())
with open('downloaded_json.json', 'w') as outfile:
    json.dump(data, outfile)

#Create the output geodatabase
arcpy.CreateFileGDB_management(arcpy.env.workspace, "temp.gdb")

#Create the output feature class or table
arcpy.JSONToFeatures_conversion("downloaded_json.json",
                                os.path.join("temp.gdb", output_name))
def export_features_bulk(self, org_id, content, export_loc): """Exports all feature layers from ArcGIS Online and saves them in a File Geodatabase""" items = content['items'] for z in items: if z['type'] == "Feature Service" and "Hosted Service" in z[ 'typeKeywords']: arcpy.AddMessage(z['title']) for i in items: if i['type'] == "Feature Service" and "Hosted Service" in i[ 'typeKeywords']: arcpy.AddMessage("\nSaving " + i['title']) arcpy.AddMessage("URL: " + i['url']) service = ''.join(e for e in i['title'] if e.isalnum()) service_folder = os.path.join(export_loc, service) os.makedirs(service_folder) json_folder = os.path.join(service_folder, 'json') os.makedirs(json_folder) gdb_name = 'export_' + service[:25] + '.gdb' arcpy.CreateFileGDB_management(service_folder, gdb_name, 'CURRENT') params = dict(token=self.token['token'], f='json') params_query = dict(token=self.token['token'], f='json', outFields='*', where='1=1', returnGeometry='true') actual_name = i['url'].split("/")[-2].replace(" ", "%20") url = self.services + '/{}/arcgis/rest/services/{}/FeatureServer'.format( org_id, actual_name) item = self.make_request(url, params) for layer in item['layers']: layer_item_url = self.services + '/{}/arcgis/rest/services/{}/FeatureServer/{}/query'.format( org_id, actual_name, layer['id']) layer_item = self.make_request(layer_item_url, params_query) if 'error' in layer_item: raise Exception(items['error']['message'], items['error']['details']) pass else: arcpy.AddMessage("converting features...") json_source = os.path.join(json_folder, layer['name'] + ".json") with open(json_source, 'w') as fp: json_file = json.dump(obj=layer_item, fp=fp, indent=4) feature_class_name = ''.join(e for e in layer['name'] if e.isalnum()) output = os.path.join(service_folder, gdb_name, feature_class_name) arcpy.JSONToFeatures_conversion(json_source, output) return
import arcpy
import pandas as pd
import urllib.request
import os

os.getcwd()
arcpy.env.workspace = "c:/data"

## census tracts
## download data using SODA API format (geojson)
urllib.request.urlretrieve(
    'https://data.cityofnewyork.us/resource/i69b-3rdj.geojson?$limit=4000',
    'test.geojson')
## convert downloaded file to shapefile
arcpy.JSONToFeatures_conversion('test.geojson', os.path.join('test.shp'))

## 311 complaints
## download data using SODA API format (geojson)
urllib.request.urlretrieve(
    "https://data.cityofnewyork.us/resource/fhrw-4uyv.geojson?$limit=5000&$where=complaint_type%20like%20%27%25HEAT%25%27",
    'test2.geojson')
arcpy.JSONToFeatures_conversion('test2.geojson', os.path.join('test2.shp'), 'POINT')

arcpy.SpatialJoin_analysis("test", "test2", "in_memory/points_SpatialJoin")
## arcpy.SpatialJoin_analysis("test2", "test", "in_memory/points_SpatialJoin2", "JOIN_ONE_TO_MANY", "KEEP_ALL", "", "INTERSECT")

url_points = "https://data.cityofnewyork.us/resource/fhrw-4uyv.geojson?$limit=5000&$where=complaint_type%20like%20%27%25HEAT%25%27"
url_poly = 'https://data.cityofnewyork.us/resource/i69b-3rdj.geojson?$limit=4000'
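## Hedged follow-up (not in the original script): Spatial Join writes a
## Join_Count field to its output, so the number of 311 complaints matched to
## each census tract can be read back with a search cursor.
with arcpy.da.SearchCursor("in_memory/points_SpatialJoin", ["Join_Count"]) as cursor:
    counts = [row[0] for row in cursor]
print("complaints matched to tracts:", sum(counts))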
def process(self): #: Load table from web service using a RecordSet self.log.info('Loading UDOT data...') record_set = arcpy.RecordSet() record_set.load(secrets.TABLE_URL) traffic_dict = json.loads(record_set.JSON) #: traffic_dict['features'] is the actual table, but is list of nested dicts, all with the single outer key 'attributes' cleaned_traffic_dict = [ t['attributes'] for t in traffic_dict['features'] ] traffic_frame = pd.DataFrame.from_dict(cleaned_traffic_dict) #: Convert dates for .last() operation later traffic_frame['Date'] = pd.to_datetime(traffic_frame['Date']) multi_index_df = traffic_frame.set_index(['Station', 'Date']) station_ids = traffic_frame['Station'].unique() trend_columns = [f'D{i}' for i in range(1, 15)] avgs_df = pd.DataFrame(index=station_ids, columns=['AvgChange7D'], dtype=np.float64) for i in station_ids: working_df = multi_index_df.loc[i, :].last('7D').copy() avgs_df.loc[i, 'AvgChange7D'] = working_df['PercentChange'].mean() avgs_df.loc[i, 'StartDate'] = str(working_df.index[0]) avgs_df.loc[i, 'EndDate'] = str(working_df.index[-1]) fourteen_day_df = multi_index_df.loc[i, 'PercentChange'].last( '14D').copy() for d in range(14): day_column = f'D{d+1}' avgs_df.loc[i, day_column] = fourteen_day_df.iloc[d] #: Transpose so that the index becomes the keys and the rows are the values avgs_dict = avgs_df.T.to_dict() #: Load features into a feature set feature_set = arcpy.FeatureSet() feature_set.load(secrets.FEATURE_URL) feature_name = 'TrafficChanges' temp_json_path = os.path.join(arcpy.env.scratchFolder, 'features.json') temp_fc_path = os.path.join(arcpy.env.scratchGDB, 'features') sddraft_path = os.path.join(arcpy.env.scratchFolder, f'{feature_name}.sddraft') sd_path = sddraft_path[:-5] #: Make sure none of our files already exist paths = [sddraft_path, sd_path, temp_json_path, temp_fc_path] for item in paths: if arcpy.Exists(item): self.log.info(f'Deleting {item} prior to use...') arcpy.Delete_management(item) #: Save features to .json, load .json as a feature class self.log.info(f'Saving JSON to {temp_json_path}...') with open(temp_json_path, 'w') as json_file: json_file.write(feature_set.JSON) self.log.info(f'Creating temp feature class {temp_fc_path}...') arcpy.JSONToFeatures_conversion(temp_json_path, temp_fc_path) #: Add our new columns. 
self.log.info('Adding columns...') columns = [('DetectorStation', 'TEXT'), ('AvgChange7D', 'DOUBLE'), ('StartDate', 'TEXT'), ('EndDate', 'TEXT')] # trend_columns = [(f'D{i}', 'DOUBLE') for i in range(1, 15)] columns.extend([(d, 'DOUBLE') for d in trend_columns]) for col in columns: name, dtype = col arcpy.AddField_management(temp_fc_path, name, dtype) #: Update the temp feature class with new averages self.log.info('Updating feature class with new averages...') fields = ['DetectorStation', 'AvgChange7D', 'StartDate', 'EndDate'] fields.extend(trend_columns) with arcpy.da.UpdateCursor(temp_fc_path, fields) as ucursor: for row in ucursor: station = row[0] if station in avgs_dict: row[1] = avgs_dict[station]['AvgChange7D'] row[2] = avgs_dict[station]['StartDate'].split()[0] row[3] = avgs_dict[station]['EndDate'].split()[0] for column, index in zip(trend_columns, range(4, 18)): row[index] = avgs_dict[station][column] ucursor.updateRow(row) #: Add anchor points for the symbology self.log.info('Adding anchor points...') anchor_fields = ['DetectorStation', 'AvgChange7D', 'SHAPE@XY'] with arcpy.da.InsertCursor(temp_fc_path, anchor_fields) as icursor: null_island = (0, 0) icursor.insertRow(['AnchorLow', 25, null_island]) icursor.insertRow(['AnchorHigh', 100, null_island]) #: Overwrite existing AGOL service self.log.info(f'Connecting to AGOL as {secrets.USERNAME}...') gis = arcgis.gis.GIS('https://www.arcgis.com', secrets.USERNAME, secrets.PASSWORD) sd_item = gis.content.get(secrets.SD_ITEM_ID) #: Get project references #: Assume there's only one map in the project, remove all layers for clean map self.log.info(f'Getting map from {secrets.PROJECT_PATH}...') project = arcpy.mp.ArcGISProject(secrets.PROJECT_PATH) covid_map = project.listMaps()[0] for layer in covid_map.listLayers(): self.log.info(f'Removing {layer} from {covid_map.name}...') covid_map.removeLayer(layer) layer = covid_map.addDataFromPath(temp_fc_path) project.save() #: draft, stage, update, publish self.log.info(f'Staging and updating...') sharing_draft = covid_map.getWebLayerSharingDraft( 'HOSTING_SERVER', 'FEATURE', feature_name, [layer]) sharing_draft.exportToSDDraft(sddraft_path) arcpy.server.StageService(sddraft_path, sd_path) sd_item.update(data=sd_path) sd_item.publish(overwrite=True) #: Update item description self.log.info('Updating item description...') feature_item = gis.content.get(secrets.FEATURES_ITEM_ID) start_date = avgs_dict[station]['StartDate'].split()[0] end_date = avgs_dict[station]['EndDate'].split()[0] description = f'Traffic data obtained from UDOT; updates occur every morning. Data currently reflects traffic from {start_date} to {end_date}.' feature_item.update(item_properties={'description': description})
def json_to_featureclass(json_file, out_fc):
    """ converts a json file (.json) to a feature class """
    if arcpyFound == False:
        raise Exception("ArcPy is required to use this function")
    return arcpy.JSONToFeatures_conversion(in_json_file=json_file,
                                           out_features=out_fc)[0]
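# Hedged usage sketch: the paths below are placeholders. The [0] index in the
# wrapper pulls the output path out of the arcpy Result object, so the return
# value can be passed straight to other tools.
out_path = json_to_featureclass(r"C:\temp\features.json",
                                r"C:\temp\scratch.gdb\features")
print(out_path)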
#write query
query = "?where={}&outFields={}&returnGeometry=true&f=json".format(where, outfields)

#retrieve your service and your query
service = URL + query
response = urllib2.urlopen(service)
myJSON = response.read()
foo = open("jsonOutput.json", "wb")
foo.write(myJSON)
foo.close()
arcpy.JSONToFeatures_conversion("jsonOutput.json", "test.shp")

#create workbook
excelworkbook = xlwt.Workbook()
sheet = excelworkbook.add_sheet(sheet)

#create column heads
sheet.write(0, 0, "Incident")
sheet.write(0, 1, "Date")
sheet.write(0, 2, "Mode")
sheet.write(0, 3, "Count")
num = 1

#parse through file and write excel sheet
for row in arcpy.da.SearchCursor("test.shp", ["Incident", "Date", "Mode", "Count"]):
float('{}'.format(x2)), float('{}'.format(y1)) ],[ float('{}'.format(x1)), float('{}'.format(y1)) ],[ float('{}'.format(x1)), float('{}'.format(y2)) ] ] ] } } ]} json_fc = json.dumps(dict_fc) f = open("dict_fc.json","w") f.write(json_fc) f.close() hr=(datetime.datetime.now().strftime('%H:%M:%S')).replace(":","_") new_fc=r'C:\Users\jcruz\Documents\ArcGIS\Default.gdb\json_to_fc_{}'.format(hr) arcpy.JSONToFeatures_conversion("dict_fc.json",new_fc) layer_df=arcpy.MakeFeatureLayer_management(new_fc,"layer_Extend") sent1=arcpy.SelectLayerByLocation_management(Layer1,"INTERSECT",layer_df) #getcount=arcpy.management.GetCount(Layer1) X=[] Y=[] with arcpy.da.SearchCursor(Layer1,["SHAPE@XY"]) as cursor: for i in cursor: X.append(i[0][0]) Y.append(i[0][1]) averageX=sum(X)/len(X) averageY=sum(Y)/len(Y) df_x=[] df_y=[] with arcpy.da.SearchCursor(layer_df,["SHAPE@XY"]) as cursor_df:
def main():
    for x in [tbcarac]:
        ftx = arcpy.JSONToFeatures_conversion(jsonCaract, 'in_memory\\ft_%s' % x)
        copy = arcpy.TableToTable_conversion(ftx, pathGdb, x)
#print files
shps = []
for i in files:
    name = str(feature_name) + i.split("\\")[-1].replace(".json", "")
    print name
    output = os.path.join(
        data_location, flders[2], "%s.gdb" % GDBNAME, name
    )  #os.path.join(location.replace("\JSData",""),"Vectors",str(gdb_name),name)#.replace("\","/")
    print output
    shps.append(output)
    if arcpy.Exists(output):
        print "Avoid It!"
        #arcpy.JSONToFeatures_conversion(i,output)
    else:
        print "Do It! %s" % output
        arcpy.JSONToFeatures_conversion(i, output)

#---------------------------------------------------------------------------#
location = os.path.join(data_location, flders[1])
arcpy.env.workspace = location  #r"F:\Drives\C\Winrock\Desktop\LMet\sixmaps\Data\Address\Vectors\New File Geodatabase.gdb"
f = arcpy.ListFiles()
files = []
for i in f:
    files.append(os.path.join(location, i))
#print files
shps = []
for i in files:
    name = str(feature_name) + i.split("\\")[-1].replace(".json", "")
    print name
    output = os.path.join(data_location, flders[2], "%s.gdb" % GDBNAME, name)
    print output
def json_to_featureclass(json_file, out_fc):
    if hasArcPy:
        return arcpy.JSONToFeatures_conversion(in_json_file=json_file,
                                               out_features=out_fc)[0]
    return None
def ConvertJsontoFeature(JsonFeatures):
    #TODO
    arcpy.env.workspace = "C:\\Users\\jwelty\\Documents\\CED\\ced_spatial_data\\temp_files"
    arcpy.JSONToFeatures_conversion(JsonFeatures, "ClipTest.shp")
import arcpy
import requests, os, datetime

# download of daily (last 24h) earthquake data from ZAMG
# alternative file types: csv
# alternative time periods: lastweek, lastmonth
url = "http://geoweb.zamg.ac.at/static/event/lastday.json"
r = requests.get(url)
with open("lastday.json", "wb") as f:
    f.write(r.content)

# priming work with ArcGIS Pro 2.7 Project File
p = arcpy.mp.ArcGISProject("zamg2jpeg.aprx")

# arcpy function converts the json to a feature class (reason for choosing json over csv)
arcpy.JSONToFeatures_conversion("lastday.json",
                                os.path.join(p.defaultGeodatabase, "quakes"))

# data field calculation / type conversion for later use in symbology
arcpy.management.AddField('quakes', 'Magnitude', 'DOUBLE', '', '', '', 'Magnitude', '', '', '')
expression = "try2float(!mag!)"
code_block = """
def try2float(mag):
    if mag == "-":
        return 0
    else:
        return float(mag)"""
arcpy.management.CalculateField('quakes', 'Magnitude', expression, 'PYTHON3', code_block)

# expression for point size: exp("Magnitude")
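# Hedged follow-up (assumption, not part of the original script): the closing
# comment suggests scaling point size by exp("Magnitude"); one way to stage
# that is to precompute the value into its own field. The field name
# 'SymbolSize' is illustrative.
arcpy.management.AddField('quakes', 'SymbolSize', 'DOUBLE')
arcpy.management.CalculateField(
    'quakes', 'SymbolSize', 'size(!Magnitude!)', 'PYTHON3',
    'import math\ndef size(mag):\n    return math.exp(mag)')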
def ConvertWMSandReturnFeatures(SciBaseID): overallcount = 0 ## List the locations were data will be stored #TODO testspace = "C:\\Users\\jwelty\\Documents\\CED\\ced_spatial_data\\temp_files" json_filePoly = testspace + "\\A_Poly_" + SciBaseID + ".json" json_fileLine = testspace + "\\A_Line_" + SciBaseID + ".json" json_fileLine1 = testspace + "\\A1_Line_" + SciBaseID + ".json" json_fileLineTest = testspace + "\\A_LineTest_" + SciBaseID + ".json" json_filePoint = testspace + "\\A_Point_" + SciBaseID + ".json" arcpy.env.workspace = testspace + "\\JsonConversion.gdb" fcPoly = testspace + "\\A_Poly_" + SciBaseID + ".shp" fc1Poly = testspace + "\\A_Poly_" + SciBaseID + "_Project.shp" fcLine = testspace + "\\A_Line_" + SciBaseID + ".shp" fc1Line = testspace + "\\A_Line_" + SciBaseID + "_Project.shp" fcPoint = testspace + "\\A_Point_" + SciBaseID + ".shp" fc1Point = testspace + "\\A_Point_" + SciBaseID + "_Project.shp" if os.path.isfile(json_filePoly): os.remove(json_filePoly) if os.path.isfile(json_fileLine): os.remove(json_fileLine) if os.path.isfile(json_fileLine1): os.remove(json_fileLine1) if os.path.isfile(json_filePoint): os.remove(json_filePoint) try: arcpy.Delete_management(fcPoly) except: print "Nothing to delete" try: arcpy.Delete_management(fc1Poly) except: print "Nothing to delete" try: arcpy.Delete_management(fcLine) except: print "Nothing to delete" try: arcpy.Delete_management(fc1Line) except: print "Nothing to delete" try: arcpy.Delete_management(fcPoint) except: print "Nothing to delete" try: arcpy.Delete_management(fc1Point) except: print "Nothing to delete" str_wfs_serverVal1 = "https://www.sciencebase.gov/catalogMaps/mapping/ows" strUsername1 = strUsername strPassword = strPwd str_wfs_server = str_wfs_serverVal1 + "/" + SciBaseID req = 'GetFeature' # request version = '1.0.0' service = 'WFS' typeName = 'footprint' maxfeatures = 200000 srsname = 'EPSG:4326' outputFormat = 'json' fc1 = "None" strWMS_URL = '%s?request=%s&version=%s&service=%s&typeName=%s&maxfeatures=%s&srsname=%s&outputFormat=%s' % \ (str_wfs_server, req, version, service, typeName,maxfeatures, srsname, outputFormat) sb = pysb.SbSession() # Create the ScienceBase session sb.login( strUsername1, strPassword ) # log in to ScienceBase (you don't need to login to do certain things). 
try: strResponseJSON = sb.getJson(strWMS_URL) #get the data except: return "Error" if strResponseJSON == "NoData": return "None" time.sleep(0.2) dictFeatures = strResponseJSON['features'] if "error" in dictFeatures: return i = 0 feattype = "" arrayParamFeaturesPoly = [] arrayParamFeaturesLine = [] arrayParamFeaturesPoint = [] esrijsonPoly = "" esrijsonPoint = "" esrijsonLine = "" featcntPoly = 1 featcntLine = 1 featcntPoint = 1 oidPolycnt = 1 oidPointcnt = 1 oidLinecnt = 1 oidPoly1 = "" oidPoly2 = "" oidLine1 = "" oidLine2 = "" oidPoint1 = "" oidPoint2 = "" xmax = 0 xmin = 0 ymax = 0 ymin = 0 xmid = 0 ymid = 0 #Prepare a list of spatial features to send back SpatialFeatures = [] z = open(json_filePoly, 'w') z.close() v = open(json_fileLine, 'w') v.close() v1 = open(json_fileLine1, 'w') v1.close() r = open(json_filePoint, 'w') r.close() q = open(json_fileLineTest, 'w') r.close() for dictFeature in dictFeatures: z = open(json_filePoly, 'a') v = open(json_fileLine, 'a') v1 = open(json_fileLine1, 'a') r = open(json_filePoint, 'a') DictJSONGeom1 = dictFeature['geometry'] feattype = DictJSONGeom1['type'] feattypePoly = "" feattypePoint = "" feattypeLine = "" print feattype if feattype == "Polygon" or feattype == "MultiPolygon": feattypePoly = "esriGeometryPolygon" if oidPolycnt == 1: esrijsonPoly = '{"displayFieldName":"","fieldAliases":{"OID":"OID","Name":"Name","Shape_Length":"Shape_Length","Shape_Area":"Shape_Area"},"geometryType":"' + feattypePoly + '","spatialReference":{"wkid":104199,"latestWkid":4326},"fields":[{"name":"OID","type":"esriFieldTypeOID","alias":"OID"},{"name":"Name","type":"esriFieldTypeString","alias":"Name","length":60},{"name":"Shape_Length","type":"esriFieldTypeDouble","alias":"Shape_Length"},{"name":"Shape_Area","type":"esriFieldTypeDouble","alias":"Shape_Area"}],"features":[' # Write the initial ESRI Json format z.write(str(esrijsonPoly)) if feattype == "Line" or feattype == "Polyline" or feattype == "MultiLineString": return "Failed" # feattypeLine = "esriGeometryPolyline" # if oidLinecnt == 1: # esrijsonLine = '{"displayFieldName":"","fieldAliases":{"OID":"OID","Name":"Name","Shape_Length":"Shape_Length"},"geometryType":"' + feattypeLine + '","spatialReference":{"wkid":104199,"latestWkid":4326},"fields":[{"name":"OID","type":"esriFieldTypeOID","alias":"OID"},{"name":"Name","type":"esriFieldTypeString","alias":"Name","length":60},{"name":"Shape_Length","type":"esriFieldTypeDouble","alias":"Shape_Length"}],"features":[' # Write the initial ESRI Json format # v.write(str(esrijsonLine)) if feattype == "Point" or feattype == "MultiPoint": return "Failed" # feattypePoint = "Multipoint" # if oidPointcnt == 1: # esrijsonPoint = '{"displayFieldName":"","fieldAliases":{"OID":"OID","Name":"Name"},"geometryType":"esriGeometryMultipoint","spatialReference":{"wkid":104199,"latestWkid":4326},"fields":[{"name":"OID","type":"esriFieldTypeOID","alias":"OID"},{"name":"Name","type":"esriFieldTypeString","alias":"Name","length":60}],"features":[' # Write the initial ESRI Json format # r.write(str(esrijsonPoint)) print feattypePoly if feattypePoly == "esriGeometryPolygon": oidPoly1 = '{"attributes":{"OID":' + str( oidPolycnt ) + ',"Name":"","Shape_Length":Null,"Shape_Area":Null},"geometry":{"rings":' oidPoly1a = '{"attributes":{"OID":' + str( oidPolycnt ) + ',"Name":"","Shape_Length":Null,"Shape_Area":Null},"geometry":{"rings":[[' oidPoly2 = '}},{"attributes":{"OID":' + str( oidPolycnt ) + ',"Name":"","Shape_Length":Null,"Shape_Area":Null},"geometry":{"rings":' oidPoly2a = 
']]}},{"attributes":{"OID":' + str( oidPolycnt ) + ',"Name":"","Shape_Length":Null,"Shape_Area":Null},"geometry":{"rings":[[' if oidPolycnt == 1: esrijsonPoly = esrijsonPoly + oidPoly1 # Add geometry z.write(str(oidPoly1a)) else: esrijsonPoly = esrijsonPoly + oidPoly2 # Add additional geometryDictJSONGeom1 = dictFeature['geometry'] z.write(str(oidPoly2a)) strRingsArray = DictJSONGeom1['coordinates'] strRingsArray = str(strRingsArray) strRingsArray = strRingsArray[ 1:-1] # remove extra brakets for Polygons strRingsArray2 = strRingsArray[2:-2] strRingsArray3 = strRingsArray2.split("]") j = 0 strRingsArray1 = [] for array3 in strRingsArray3: arraytest = array3[0:5] array3 = array3.replace("[", "") array4 = array3.replace(",", "") array4 = array4.replace(" ", ", ") if j == 0: arrayf = "[" + array3 + "]" arrayf1 = "[" + array4 + "]" strRingsArray1.append(eval(arrayf.strip())) z.write(str(arrayf1)) j = 1 else: if array3 > "": if arraytest == ", [[[": oidPolycnt = oidPolycnt + 1 oidPoly2 = '}},{"attributes":{"OID":' + str( oidPolycnt ) + ',"Name":"","Shape_Length":Null,"Shape_Area":Null},"geometry":{"rings":' oidPoly3 = ']]}},{"attributes":{"OID":' + str( oidPolycnt ) + ',"Name":"","Shape_Length":Null,"Shape_Area":Null},"geometry":{"rings":[[' # strRingsArray1.append(oidPoly2) z.write(str(oidPoly3)) else: z.write(str(", ")) arrayf = "[" + array3[2:] + "]" arrayf1 = "[" + array4[2:] + "]" z.write(str(arrayf1)) if arrayf != "]": strRingsArray1.append(eval(arrayf.strip())) for x, y in strRingsArray1: if xmax == 0: xmax = x else: if xmax < x: xmax = x if xmin == 0: xmin = x else: if xmin > x: xmin = x if ymax == 0: ymax = y else: if ymax < y: ymax = y if ymin == 0: ymin = y else: if ymin > y: ymin = y esrijsonPoly = esrijsonPoly + strRingsArray # Add geometry data oidPolycnt = oidPolycnt + 1 elif feattypeLine == "esriGeometryPolyline": oidLine1 = '{"attributes":{"OID":' + str( oidLinecnt ) + ',"Name":"","Shape_Length":Null},"geometry":{"paths":' oidLine1a = '{"attributes":{"OID":' + str( oidLinecnt ) + ',"Name":"","Shape_Length":Null},"geometry":{"paths":[[' oidLine2 = '}},{"attributes":{"OID":' + str( oidLinecnt ) + ',"Name":"","Shape_Length":Null},"geometry":{"paths":' oidLine2a = ']]}},{"attributes":{"OID":' + str( oidLinecnt ) + ',"Name":"","Shape_Length":Null},"geometry":{"paths":[[' if oidLinecnt == 1: esrijsonLine = esrijsonLine + oidLine1 # Add geometry v.write(str(oidLine1a)) else: esrijsonLine = esrijsonLine + oidLine2 # Add additional geometry v.write(str(oidLine2a)) DictJSONGeom1 = dictFeature['geometry'] strRingsArray = DictJSONGeom1['coordinates'] strRingsArray = str(strRingsArray) strRingsArray2 = strRingsArray[2:-2] strRingsArray3 = strRingsArray2.split(", [-1, [") strRingsArray4 = str(strRingsArray3[0]) strRingsArray4 = strRingsArray4.replace("[", "") strRingsArray4 = strRingsArray4.replace("], ", ";") strRingsArray4 = str(strRingsArray4) strRingsArray9 = strRingsArray4.split("];") arraycnt = 0 for str9 in strRingsArray9: q = open(json_fileLineTest, 'a') q.write("Arraycnt: " + str(arraycnt)) q.close() if arraycnt == 1: oidLinecnt = oidLinecnt + 1 oidLine4 = ']]}},{"attributes":{"OID":' + str( oidLinecnt ) + ',"Name":"","Shape_Length":Null},"geometry":{"paths":[[' v.write(str(oidLine4)) str9 = str(str9) strRingsArray5 = str9.replace("]", "") q = open(json_fileLineTest, 'a') q.write("strRingsArray5: " + str(strRingsArray5)) q.close() strRingsArray5 = strRingsArray5.split(";") j = 0 strRingsArray1 = [] for array3 in strRingsArray5: arraytest = array3[0:5] array3 = array3.replace("[", 
"") array4 = array3.replace(",", "") array4 = array4.replace(" ", ", ") array4 = array4.replace("]", "") if j == 0: arrayf = "[" + array3 + "]" arrayf1 = "[" + array4 + "]" strRingsArray1.append(eval(arrayf.strip())) v.write(str(arrayf1)) j = 1 else: if array3 != "": if arraytest == ", [[[": oidLinecnt = oidLinecnt + 1 oidLine2 = '}},{"attributes":{"OID":' + str( oidLinecnt ) + ',"Name":"","Shape_Length":Null},"geometry":{"paths":' oidLine3 = ']]}},{"attributes":{"OID":' + str( oidLinecnt ) + ',"Name":"","Shape_Length":Null},"geometry":{"paths":[[' # strRingsArray1.append(oidPoly2) v.write(str(oidLine3)) else: v.write(str(", ")) arrayf = "[" + array3 + "]" arrayf1 = "[" + array4 + "]" v.write(str(arrayf1)) if arrayf != "]": strRingsArray1.append(eval(arrayf.strip())) arraycnt = arraycnt + 1 for x, y in strRingsArray1: if xmax == 0: xmax = x else: if xmax < x: xmax = x if xmin == 0: xmin = x else: if xmin > x: xmin = x if ymax == 0: ymax = y else: if ymax < y: ymax = y if ymin == 0: ymin = y else: if ymin > y: ymin = y esrijsonLine = esrijsonLine + strRingsArray # Add geometry data oidLinecnt = oidLinecnt + 1 elif feattypePoint == "Multipoint": if oidPointcnt == 1: oidPoint1 = '{"attributes":{"OID":' + str( oidPointcnt) + ',"Name":""},"geometry":{"points":[[' DictJSONGeom1 = dictFeature['geometry'] feattype = DictJSONGeom1['type'] strRingsArray = DictJSONGeom1['coordinates'] strRingsArray = str(strRingsArray) strRingsArray = strRingsArray.replace("[", "") strRingsArray = strRingsArray.replace("], ", ";") strRingsArray = strRingsArray.split(";") try: for ads in strRingsArray: ads1 = str(ads) ads2 = ads1.replace(", ", ";") ads2 = str(ads2) ads3 = ads2.split(";") x = ads3[0] y = ads3[1] if oidPointcnt == 1 and overallcount == 0: oidPoint2 = '{"attributes":{"OID":' + str( oidPointcnt ) + ',"Name":""},"geometry":{"points":[[' r.write(str(oidPoint2) + str(x) + ',' + str(y)) oidPointcnt = oidPointcnt + 1 overallcount = 1 else: oidPoint3 = ']]}},{"attributes":{"OID":' + str( oidPointcnt ) + ',"Name":""},"geometry":{"points":[[' r.write(oidPoint3 + str(x) + ',' + str(y)) oidPointcnt = oidPointcnt + 1 except: badluck = "Badluck" if xmax == 0: xmax = x else: if xmax < x: xmax = x if xmin == 0: xmin = x else: if xmin > x: xmin = x if ymax == 0: ymax = y else: if ymax < y: ymax = y if ymin == 0: ymin = y else: if ymin > y: ymin = y # esrijsonPoint = esrijsonPoint + strRingsArray # Add geometry data # oidPointcnt = oidPointcnt + 1 z.close() v.close() r.close() i += 1 if (i > 50): break xmid = (xmax + xmin) / 2 ymid = (ymax + ymin) / 2 xdiff = (xmax - xmin) ydiff = (ymax - ymin) zoomlevel = 1.5 if xdiff > ydiff: zoomlevel = xdiff else: zoomlevel = ydiff zoom = 7 if zoomlevel < 0.01: zoom = 11 elif zoomlevel >= 0.01 and zoomlevel < 0.1: zoom = 10 elif zoomlevel >= 0.1 and zoomlevel < 0.25: zoom = 9 elif zoomlevel >= 0.25 and zoomlevel < 1.0: zoom = 8 elif zoomlevel >= 1.0 and zoomlevel < 3.0: zoom = 7 elif zoomlevel >= 3.0 and zoomlevel < 6.0: zoom = 6 elif zoomlevel >= 6.0 and zoomlevel < 12.0: zoom = 5 elif zoomlevel >= 12.0: zoom = 4 SpatialFeatures.append(str(xmid)) SpatialFeatures.append(str(ymid)) SpatialFeatures.append(str(zoom)) if esrijsonPoly != "": esrijsonPoly = esrijsonPoly + '}}]}' z = open(json_filePoly, 'a') z.write(']]}}]}') z.close() if esrijsonLine != "": esrijsonLine = esrijsonLine + '}}]}' v = open(json_fileLine, 'a') v.write(']]}}]}') v.close() v = open(json_fileLine, 'r') v1 = open(json_fileLine1, 'ab') print "Writing Lines" for line in v: v1.write(line.replace('][', '], [')) 
v.close() v1.close() if os.path.isfile(json_fileLine): os.remove(json_fileLine) v1 = open(json_fileLine1, 'r') v = open(json_fileLine, 'ab') print "Writing Lines" for line in v1: v.write(line) v.close() v1.close() if esrijsonPoint != "": esrijsonPoint = esrijsonPoint + '}}]}' r = open(json_filePoint, 'a') r.write('}}]}') r.close() with open(json_filePoint, "r") as myfile: datareplace = myfile.read().replace('\n', '') datareplace1 = datareplace.replace("]]]]", "]]") r = open(json_filePoint, 'w') r.write(datareplace1) r.close() # Write the feature files if esrijsonPoly != "": ## Convert Json to feature arcpy.env.workspace = testspace + "\\JsonConversion.gdb" res = arcpy.JSONToFeatures_conversion(json_filePoly, fcPoly) try: arcpy.Delete_management(fc1Poly) except: print "The projected feature is not deleting" outCS = arcpy.SpatialReference( 'WGS 1984 Web Mercator (Auxiliary Sphere)') res1 = arcpy.Project_management(fcPoly, fc1Poly, outCS) SpatialFeatures.append(fc1Poly) try: arcpy.Delete_management(fcPoly) except: print "Nothing to delete" if esrijsonLine != "": ## Convert Json to feature arcpy.env.workspace = testspace + "\\JsonConversion.gdb" res = arcpy.JSONToFeatures_conversion(json_fileLine, fcLine) try: arcpy.Delete_management(fc1Line) except: print "The projected feature is not deleting" outCS = arcpy.SpatialReference( 'WGS 1984 Web Mercator (Auxiliary Sphere)') res1 = arcpy.Project_management(fcLine, fc1Line, outCS) SpatialFeatures.append(fc1Line) try: arcpy.Delete_management(fcLine) except: print "Nothing to delete" if esrijsonPoint != "": ## Convert Json to feature arcpy.env.workspace = testspace + "\\JsonConversion.gdb" res = arcpy.JSONToFeatures_conversion(json_filePoint, fcPoint) try: arcpy.Delete_management(fc1Point) except: print "The projected feature is not deleting" outCS = arcpy.SpatialReference( 'WGS 1984 Web Mercator (Auxiliary Sphere)') res1 = arcpy.Project_management(fcPoint, fc1Point, outCS) SpatialFeatures.append(fc1Point) try: arcpy.Delete_management(fcPoint) except: print "Nothing to delete" strRingsArray1 = [] return SpatialFeatures
def main(): global count_tries global max_tries global sleep_time global service_output_name_tracking_list global output_type start_time = datetime.datetime.today() try: # arcgis toolbox parameters service_endpoint = arcpy.GetParameterAsText( 0) # String - URL of Service endpoint required output_workspace = arcpy.GetParameterAsText( 1) # String - gdb/folder to put the results required max_tries = arcpy.GetParameter( 2) # Int - max number of retries allowed required sleep_time = arcpy.GetParameter( 3) # Int - max number of retries allowed required` strict_mode = arcpy.GetParameter( 4) # Bool - JSON check True/False required username = arcpy.GetParameterAsText(5) # String - username optional password = arcpy.GetParameterAsText(6) # String - password optional referring_domain = arcpy.GetParameterAsText( 7) # String - url of auth domain existing_token = arcpy.GetParameterAsText( 8) # String - valid token value query_str = arcpy.GetParameterAsText( 9) # String - valid SQL query string sanity_max_record_count = 10000 # to query by geometry need [xmin,ymin,xmax,ymax], spatial reference, and geometryType (eg esriGeometryEnvelope service_output_name_tracking_list = [] if service_endpoint == '': output_msg( "Avast! Can't plunder nothing from an empty url! Time to quit." ) sys.exit() if not type(strict_mode) is bool: strict_mode = True if not type(max_tries) is int: max_tries = int(max_tries) if not type(sleep_time) is int: sleep_time = int(sleep_time) if query_str: query_str = urllib.quote(query_str) if output_workspace == '': output_workspace = os.getcwd() output_desc = arcpy.Describe(output_workspace) output_type = output_desc.dataType if output_type == "Folder": # To Folder output_folder = output_workspace else: output_folder = output_desc.path adapter_name = get_adapter_name(service_endpoint) token_client_type = 'requestip' if referring_domain != '': referring_domain = referring_domain.replace('http:', 'https:') token_client_type = 'referer' else: referring_domain = get_referring_domain(service_endpoint) if referring_domain == r"https://www.arcgis.com": token_client_type = 'referer' # build a generic opener with the use agent spoofed opener = urllib2.build_opener() opener.addheaders = [('User-agent', 'Mozilla/5.0')] urllib2.install_opener(opener) token = '' if username and not existing_token: token = get_token(username=username, password=password, referer=referring_domain, adapter_name=adapter_name, client_type=token_client_type) elif existing_token: token = existing_token tokenstring = '' if len(token) > 0: tokenstring = '&token=' + token output_msg("Start the plunder! {0}".format(service_endpoint)) output_msg("We be stashing the booty in {0}".format(output_workspace)) service_layers_to_get = get_all_the_layers(service_endpoint, tokenstring) output_msg("Blimey, {} layers for the pillagin'".format( len(service_layers_to_get))) for slyr in service_layers_to_get: count_tries = 0 downloaded_fc_list = [] # for file merging. 
response = None current_iter = 0 max_record_count = 0 feature_count = 0 final_fc = '' output_msg("Now pillagin' yer data from {0}".format(slyr)) service_info_call = urllib2.urlopen(slyr + '?f=json' + tokenstring).read() if service_info_call: service_info = json.loads(service_info_call, strict=False) else: raise Exception( "'service_info_call' failed to access {0}".format(slyr)) if not service_info.get('error'): # add url to info service_info[u'serviceURL'] = slyr # assume JSON supported supports_json = True if strict_mode: # check JSON supported supports_json = False if 'supportedQueryFormats' in service_info: supported_formats = service_info.get( 'supportedQueryFormats').split(",") for data_format in supported_formats: if data_format == "JSON": supports_json = True break else: output_msg( 'Strict mode scuttled, no supported formats') objectid_field = "OBJECTID" if 'fields' in service_info: field_list = service_info.get('fields') if field_list: for field in field_list: ftype = field.get('type') if ftype == 'esriFieldTypeOID': objectid_field = field.get('name') else: output_msg("No field list - come about using {0}!".format( objectid_field)) # get count if query_str == '': feature_count_call = urllib2.urlopen( slyr + '/query?where=1%3D1&returnCountOnly=true&f=pjson' + tokenstring).read() else: feature_count_call = urllib2.urlopen( slyr + '/query?where=' + query_str + '&returnCountOnly=true&f=pjson' + tokenstring).read() if feature_count_call: feature_count = json.loads(feature_count_call) service_info[u'FeatureCount'] = feature_count.get('count') service_name_cl = make_service_name(service_info, output_workspace, len(output_folder)) info_filename = service_name_cl + "_info.txt" info_file = os.path.join(output_folder, info_filename) # write out the service info for reference with open(info_file, 'w') as i_file: json.dump(service_info, i_file, sort_keys=True, indent=4, separators=(',', ': ')) output_msg("Yar! 
{0} Service info stashed in '{1}'".format( service_name_cl, info_file)) if supports_json: try: # to query using geometry,&geometry= &geometryType= esriGeometryEnvelope &inSR= and probably spatial relationship and buffering feat_data_query = r"/query?outFields=*&returnGeometry=true&returnIdsOnly=false&returnCountOnly=false&objectIds=&time=&geometry=&geometryType=esriGeometryEnvelope&inSR=&spatialRel=esriSpatialRelIntersects&distance=&units=esriSRUnit_Meter&maxAllowableOffset=&geometryPrecision=&outSR=&returnExtentOnly=false&orderByFields=&groupByFieldsForStatistics=&outStatistics=&resultOffset=&resultRecordCount=&returnZ=false&returnM=false&f=json" + tokenstring if query_str == '': feat_OIDLIST_query = r"/query?where=" + objectid_field + r"+%3E+0&returnGeometry=false&returnIdsOnly=true&returnCountOnly=false&returnExtentOnly=false&objectIds=&time=&geometry=&geometryType=esriGeometryEnvelope&inSR=&spatialRel=esriSpatialRelIntersects&distance=&units=esriSRUnit_Meter&outFields=&maxAllowableOffset=&geometryPrecision=&outSR=&orderByFields=&groupByFieldsForStatistics=&outStatistics=&resultOffset=&resultRecordCount=&returnZ=false&returnM=false&f=json" + tokenstring else: feat_OIDLIST_query = r"/query?where=" + query_str + r"&returnGeometry=false&returnIdsOnly=true&returnCountOnly=false&returnExtentOnly=false&objectIds=&time=&geometry=&geometryType=esriGeometryEnvelope&inSR=&spatialRel=esriSpatialRelIntersects&distance=&units=esriSRUnit_Meter&outFields=&maxAllowableOffset=&geometryPrecision=&outSR=&orderByFields=&groupByFieldsForStatistics=&outStatistics=&resultOffset=&resultRecordCount=&returnZ=false&returnM=false&f=json" + tokenstring max_record_count = service_info.get( 'maxRecordCount' ) # maximum number of records returned by service at once if max_record_count > sanity_max_record_count: output_msg( "{0} max records is a wee bit large, using {1} instead..." .format(max_record_count, sanity_max_record_count)) max_record_count = sanity_max_record_count # extract using actual OID values is the safest way feature_OIDs = None feature_OID_query = json.loads( urllib2.urlopen(slyr + feat_OIDLIST_query).read()) if feature_OID_query and 'objectIds' in feature_OID_query: feature_OIDs = feature_OID_query["objectIds"] else: output_msg("Blast, no OID values: {}".format( feature_OID_query)) if feature_OIDs: OID_count = len(feature_OIDs) sortie_count = OID_count // max_record_count + ( OID_count % max_record_count > 0) output_msg( "{0} records, in chunks of {1}, err, that be {2} sorties. Ready lads!" .format(OID_count, max_record_count, sortie_count)) feature_OIDs.sort() # chunk them for group in grouper(feature_OIDs, max_record_count): # reset count_tries count_tries = 0 start_oid = group[0] end_oid = group[max_record_count - 1] if end_oid is None: # reached the end of the iterables # loop through and find last oid, need this due to fillvalue of None in grouper for i in reversed(group): if i is not None: end_oid = i break # >= %3E%3D, <= %3C%3D if query_str == '': where_clause = "&where={0}+%3E%3D+{1}+AND+{2}+%3C%3D+{3}".format( objectid_field, str(start_oid), objectid_field, str(end_oid)) else: where_clause = "&where={0}+AND+{1}+%3E%3D+{2}+AND+{3}+%3C%3D+{4}".format( query_str, objectid_field, str(start_oid), objectid_field, str(end_oid)) # response is a string of json with the attributes and geometry query = slyr + feat_data_query + where_clause response = get_data( query) # expects json object if not response.get('features'): raise ValueError( "Abandon ship! Data access failed! 
Check what ye manag'd to plunder before failure." ) else: feature_dict = response[ "features"] # load the features so we can check they are not empty if len(feature_dict) != 0: # convert response to json file on disk then to gdb/shapefile (is fast) # can hit long filename issue!!!! # look at an arcpy.FeatureSet() to hold the data # some services produce JSON that errors a FeatureSet() ##fs = arcpy.FeatureSet() ##fs.load(response) out_JSON_name = service_name_cl + str( current_iter) + ".json" out_JSON_file = os.path.join( output_folder, out_JSON_name) with codecs.open( out_JSON_file, 'w', 'utf-8') as out_file: data = json.dumps( response, ensure_ascii=False) out_file.write(data) output_msg( "Nabbed some json data fer ye: '{0}', oids {1} to {2}" .format(out_JSON_name, start_oid, end_oid)) if output_type == "Folder": out_file_name = service_name_cl + str( current_iter) + ".shp" else: out_file_name = service_name_cl + str( current_iter) out_geofile = os.path.join( output_workspace, out_file_name) output_msg( "Converting yer json to {0}". format(out_geofile)) # may not be needed if using a featureSet() arcpy.JSONToFeatures_conversion( out_JSON_file, out_geofile) ##arcpy.JSONToFeatures_conversion(fs, out_geofile) downloaded_fc_list.append(out_geofile) os.remove(out_JSON_file ) # clean up the JSON file current_iter += 1 else: raise ValueError( "Aaar, plunderin' failed, feature OIDs is None" ) # download complete, create a final output if output_type == "Folder": final_fc = os.path.join(output_workspace, service_name_cl + ".shp") else: final_fc = os.path.join(output_workspace, service_name_cl) output_msg( "Stashin' all the booty in '{0}'".format(final_fc)) #combine all the data combine_data(fc_list=downloaded_fc_list, output_fc=final_fc) create_layer_file(service_info=service_info, service_name=service_name_cl, layer_source=final_fc, output_folder=output_folder) elapsed_time = datetime.datetime.today() - start_time output_msg("{0} plundered in {1}".format( final_fc, str(elapsed_time))) except ValueError, e: output_msg(str(e), severity=2) except Exception, e: line, err = trace() output_msg("Script Error\n{0}\n on {1}".format( err, line), severity=2) output_msg(arcpy.GetMessages()) finally: if arcpy.Exists(final_fc): data_count = int( arcpy.GetCount_management(final_fc)[0]) if data_count == OID_count: #we got it all output_msg("Scrubbing the decks...") for fc in downloaded_fc_list: arcpy.Delete_management(fc) else: output_msg( "Splicin' the data failed - found {0} but expected {1}. Check {2} to see what went wrong." .format(data_count, OID_count, final_fc))
def mainFunction( mapServiceLayer, outputFeatureClass, updateMode ): # Get parameters from ArcGIS Desktop tool by seperating by comma e.g. (var1 is 1st parameter,var2 is 2nd parameter,var3 is 3rd parameter) try: # --------------------------------------- Start of code --------------------------------------- # # Querying thet map service to get the count of records arcpy.AddMessage("Querying the map service...") mapServiceQuery1 = mapServiceLayer + "/query?where=1%3D1&returnIdsOnly=true&f=pjson" urlResponse = urllib.urlopen(mapServiceQuery1) # Get json for the response - Object IDs mapServiceQuery1JSONData = json.loads(urlResponse.read()) objectIDs = mapServiceQuery1JSONData["objectIds"] objectIDs.sort() arcpy.AddMessage("Number of records in the layer - " + str(len(objectIDs)) + "...") # Set the number of records per request and the number of requests that need to be made maxRecords = 1000 # If under maxRecords, just need to make one request if (len(objectIDs) < maxRecords): requestsToMake = 1 else: # Calculate the number of requests - Always round up requestsToMake = math.ceil( float(len(objectIDs)) / float(maxRecords)) arcpy.AddMessage("Downloading data to " + arcpy.env.scratchFolder + "...") # For every request count = 0 while (int(requestsToMake) > count): # Create the query startObjectID = int(objectIDs[count * maxRecords]) # If at the final request or if there is only one request that needs to be made if ((int(requestsToMake) == (count + 1)) or (requestsToMake == 1)): # Get the last object ID endObjectID = int(objectIDs[len(objectIDs) - 1]) serviceQuery = "OBJECTID>%3D" + str( startObjectID) + "+AND+OBJECTID<%3D" + str(endObjectID) else: # Start object ID plus 1000 records endObjectID = int(objectIDs[(count * maxRecords) + maxRecords]) serviceQuery = "OBJECTID>%3D" + str( startObjectID) + "+AND+OBJECTID<" + str(endObjectID) # Query the map service to data in json format try: mapServiceQuery2 = mapServiceLayer + "/query?where=" + serviceQuery + "&returnCountOnly=false&returnIdsOnly=false&returnGeometry=true&outFields=*&f=pjson" response = urllib2.urlopen(mapServiceQuery2) except urllib2.URLError, e: arcpy.AddError("There was an error: %r" % e) # Download the data fileChunk = 16 * 1024 downloadedFile = os.path.join( arcpy.env.scratchFolder, "Data-" + str(uuid.uuid1()) + ".json") with open(downloadedFile, 'wb') as file: downloadCount = 0 while True: chunk = response.read(fileChunk) # If data size is small if ((downloadCount == 0) and (len(chunk) < 1000)): # Log error and end download arcpy.AddError("No data returned, check the URL...") sys.exit() if not chunk: break # Write chunk to output file file.write(chunk) downloadCount = downloadCount + 1 file.close() # If it's the first request if (count == 0): # Create new dataset arcpy.JSONToFeatures_conversion( downloadedFile, os.path.join(arcpy.env.scratchGDB, "Dataset")) else: # Create dataset and load into existing arcpy.JSONToFeatures_conversion(downloadedFile, "in_memory\\DatasetTemp") arcpy.Append_management( "in_memory\\DatasetTemp", os.path.join(arcpy.env.scratchGDB, "Dataset"), "NO_TEST", "", "") # If at the final request or if there is only one request that needs to be made if ((int(requestsToMake) == (count + 1)) or (requestsToMake == 1)): arcpy.AddMessage("Downloaded and converted JSON for " + str(len(objectIDs)) + " of " + str(len(objectIDs)) + " features...") else: arcpy.AddMessage("Downloaded and converted JSON for " + str((count + 1) * maxRecords) + " of " + str(len(objectIDs)) + " features...") count = count + 1 # Convert JSON 
to feature class arcpy.AddMessage("Copying over final dataset...") # Overwrite dataset if (updateMode.lower() == "new"): # Get record count recordCount = arcpy.GetCount_management( os.path.join(arcpy.env.scratchGDB, "Dataset")) arcpy.AddMessage("Number of records for " + outputFeatureClass + " - " + str(recordCount)) # Logging if (enableLogging == "true"): # Log record count logger.info("Number of records for " + outputFeatureClass + " - " + str(recordCount)) # Load in data if (recordCount > 0): arcpy.CopyFeatures_management( os.path.join(arcpy.env.scratchGDB, "Dataset"), outputFeatureClass, "", "0", "0", "0") # Delete and append else: # Get record count recordCount = arcpy.GetCount_management( os.path.join(arcpy.env.scratchGDB, "Dataset")) arcpy.AddMessage("Number of records for " + outputFeatureClass + " - " + str(recordCount)) # Logging if (enableLogging == "true"): # Log record count logger.info("Number of records for " + outputFeatureClass + " - " + str(recordCount)) # Load in data if (recordCount > 0): arcpy.DeleteFeatures_management(outputFeatureClass) arcpy.Append_management( os.path.join(arcpy.env.scratchGDB, "Dataset"), outputFeatureClass, "NO_TEST", "", "") # --------------------------------------- End of code --------------------------------------- # # If called from gp tool return the arcpy parameter if __name__ == '__main__': # Return the output if there is any if output: arcpy.SetParameterAsText(1, output) # Otherwise return the result else: # Return the output if there is any if output: return output # Logging if (enableLogging == "true"): # Log end of process logger.info("Process ended.") # Remove file handler and close log file logMessage.flush() logMessage.close() logger.handlers = []
def json_to_featureclass(json_file, out_fc):
    """ converts a json file (.json) to a feature class """
    return arcpy.JSONToFeatures_conversion(in_json_file=json_file,
                                           out_features=out_fc)[0]
# Import arcpy module
import arcpy

# Local variables:
json = "C:\\GeoEvent\\Input\\NOAA_Warnings\\NOAA_SVR_ShortTermWarnings.json"
SDE_1 = " "  # update with path to local ArcServer Feature Service
SHP_1 = "C:\\GeoEvent\\Input\\NOAA_Warnings\\NOAA_SVR_ShortTermWarnings.shp"
SHP_2 = "C:\\GeoEvent\\Input\\NOAA_Warnings\\NOAA_SVR_ShortTermWarnings.shp"
SDE_2 = " "  # update with path to local ArcServer Feature Service

# Process: Delete
arcpy.Delete_management(SHP_1, "ShapeFile")

# Process: JSON To Features
arcpy.JSONToFeatures_conversion(json, SHP_2)

# Process: Delete Features
arcpy.DeleteFeatures_management(SDE_1)

# Process: Append
arcpy.Append_management(
    "C:\\GeoEvent\\Input\\NOAA_Warnings\\NOAA_SVR_ShortTermWarnings.shp",
    SDE_2, "TEST", "", "")