def test(journey_locations):
    """Route between the given journey locations while avoiding roads
    flagged as problem spots in SeeClickFix, then display the result.

    Args:
        journey_locations: ordered list of human-readable addresses
            (start, ..., end) to route between.

    Side effects: opens the plotted route in geojson.io.
    """
    # Unfixed calamities in Princeton, as human-readable addresses.
    avoid_locations = seeclickfix.get_calamity_locations()
    # BUG FIX: was a Python-2 `print` statement; print() works on 2 and 3.
    print(avoid_locations)

    # Geocode each calamity and map it onto a routing-network link id.
    latLngs = [geodecode(loc) for loc in avoid_locations]
    linkids = [get_linkid(latLng) for latLng in latLngs]

    # Example input kept from the original for reference:
    # journey_locations = ["Nassau and Chestnut, Princeton, NJ",
    #                      "Nassau and Scott, Princeton, NJ"]

    # routes is a list of "Route" objects from the directions.py library.
    # Each carries coordinate info for plotting and "maneuver" info for
    # the turn-by-turn directions.
    routes = get_directions(journey_locations, avoid=linkids)

    # GeoJSON uses (lng, lat) ordering, so swap before plotting calamities.
    lngLats = [(lng, lat) for (lat, lng) in latLngs]
    pointies = [Pointy(lngLat) for lngLat in lngLats]
    pointies += routes  # include the route geometry in the display
    geojsonio.display(pointies)
def display_geojson(self):
    """Render every truck's coordinates as one GeoJSON MultiPoint."""
    # Flatten the per-truck coordinate lists into a single point sequence.
    all_points = [
        point
        for truck in self._truck_list
        for point in truck.get_coordinates()
    ]
    multipoint = {"type": "MultiPoint", "coordinates": all_points}
    display(json.dumps(multipoint))
def loadkml(self):
    """Prompt the user for a KML file, convert it to GeoJSON, and load the
    result into the embedded map view (and geojson.io)."""
    # try:  (error handling left commented out by the original author)
    self.mesg = QMessageBox.information(
        self, 'Select KML file',
        'Select KML file to open in Geojson MAPs.')
    fname = QFileDialog()
    fname.setFileMode(QFileDialog.AnyFile)
    fname.setNameFilter('KML files (*.kml)')
    print('casa')  # NOTE(review): stray debug print — confirm it can be removed
    if fname.exec_():
        self.fnamepath = fname.selectedFiles()
        # NOTE(review): `pathl` is not defined in this view — presumably a
        # module-level path list; verify it is in scope at runtime.
        self.pathfile = pathl[0] + '/database/temporary/json_files'
        print(self.pathfile)
        print(self.fnamepath[0])
        # Convert the selected KML into GeoJSON under self.pathfile.
        kml2geojson.main.convert(self.fnamepath[0], self.pathfile)
        # Derive "<basename>.geojson" from the selected KML path.
        mapname = self.fnamepath[0]
        mapname = mapname.split('/')
        mapname = mapname[-1].split('.')
        mapname = mapname[0] + '.geojson'
        self.mappath = self.pathfile + '/' + mapname
        self.mappath = self.mappath.replace(os.sep, '/')  # normalize separators
        self.maptemplate = open(self.mappath).read()
        self.view.close()
        # Publish to geojson.io (force_gist uploads the data as a gist).
        gs.display(self.maptemplate, force_gist=True)
        # NOTE(review): gs.display is called a second time just to obtain a
        # URL for the view — confirm the double upload is intended.
        self.view.load(QUrl(gs.display(self.maptemplate)))
        self.view.show()
    # except: pass  (original)
def simpleSpatialOps(op_type):
    """Flask view body: run a GeoPandas overlay on two posted polygon sets.

    Expects request JSON of the form
    {"poly1": <FeatureCollection>, "poly2": <FeatureCollection>}.

    Args:
        op_type: 'union' or 'intersection'; anything else is rejected.

    Returns:
        A JSON Response {"operation": ..., "geoJSON": ...} on success, or the
        string "operation not specified" for an unknown op_type. Also opens
        the overlay result in geojson.io as a side effect.
    """
    # Validate up front instead of duplicating the whole pipeline per branch.
    if op_type not in ('union', 'intersection'):
        return "operation not specified"

    req_data = request.get_json()  # dict
    # Round-trip through a JSON string so processing_hook can normalize the
    # feature dicts (the original used it to strip unicode keys).
    strObj_rdata = json.dumps(req_data)
    fDict = json.loads(strObj_rdata, object_pairs_hook=processing_hook)

    # GeoDataFrame.from_features() accepts the feature dicts directly.
    gdf_F1 = gpd.GeoDataFrame.from_features(fDict["poly1"])
    gdf_F2 = gpd.GeoDataFrame.from_features(fDict["poly2"])

    # The two original branches were identical except for `how=`; op_type is
    # already a valid gpd.overlay mode, so one call covers both.
    resultGDF = gpd.overlay(gdf_F1, gdf_F2, how=op_type)
    displayResult = resultGDF.to_json()
    geojsonio.display(displayResult)

    respValues = {"operation": op_type, "geoJSON": displayResult}
    # TODO: "geoJSON" is embedded as a string, not nested JSON — callers must
    # json.loads it a second time.
    responseJSON = json.dumps(respValues)
    resp = Response(responseJSON, status=200, mimetype='application/json')
    return resp
def main():
    """The main method of the comparison command line tool.

    It will parse the arguments, query both OSRM-style routing services (the
    regular car profile and the electric car profile), print the raw results,
    and display both routes plus their waypoints together in geojson.io.
    """
    # Parse arguments
    args = PARSER.parse_args()

    # Query best route for the car profile. 'trip' and 'route' services share
    # the same response shape under the key '<action>s'.
    action = 'trip' if args.trip else 'route'
    req_car = _build_request(args.host, str(args.port_car), args.trip,
                             args.coordinates, args.simplified)
    response_car = requests.get(req_car).json()
    print("Result for car profile: " + str(response_car) + '\n')
    # Re-serialize the geometry so `loads` (geojson) can parse it.
    geom = loads(json.dumps(response_car[action + 's'][0]['geometry']))
    feature_car = Feature(geometry=geom, properties=PROPERTIES_CAR)
    # One marker per waypoint, numbered from 1 in visit order.
    waypoints_car = [Feature(geometry=Point(v['location']),
                             properties={
                                 "marker-color": COLOR_CAR,
                                 "marker-size": "large",
                                 "marker-symbol": v['waypoint_index'] + 1
                             })
                     for v in response_car['waypoints']]

    # Query best route for the electric car profile (same flow, other port).
    req_electric = _build_request(args.host, str(args.port_electric),
                                  args.trip, args.coordinates, args.simplified)
    response_electric = requests.get(req_electric).json()
    print("Result for electric car profile: " + str(response_electric) + '\n')
    geom = loads(json.dumps(response_electric[action + 's'][0]['geometry']))
    feature_electric = Feature(geometry=geom, properties=PROPERTIES_ELECTRIC)
    waypoints_electric = [Feature(geometry=Point(v['location']),
                                  properties={
                                      "marker-color": COLOR_ELECTRIC,
                                      "marker-size": "small",
                                      "marker-symbol": v['waypoint_index'] + 1
                                  })
                          for v in response_electric['waypoints']]

    # Show both routes and all waypoints in a single browser view.
    feature_collection = FeatureCollection([feature_car, feature_electric]
                                           + waypoints_car + waypoints_electric)
    geojsonio.display(dumps(feature_collection))

    # NOTE(review): the summary divides 'distance' by 1000 and labels it
    # "kw" — presumably a distance-based consumption proxy; confirm units.
    print("------------ Comparison of routing profiles ------------")
    print("Power Consumption Car profile:\t\t\t"
          + str("%.2f" % (response_car[action + 's'][0]['distance'] / 1000))
          + " kw")
    print("Power Consumption Electric Car profile:\t\t"
          + str("%.2f" % (response_electric[action + 's'][0]['distance']
                          / 1000)) + " kw")
    print("--------------------------------------------------------")
def writeJSON(cover_result):
    """Write the coverage results to a GeoJSON file and display them.

    Args:
        cover_result: sequence of dicts with keys 'geom' (shapely geometry),
            'id' (str), 'res' (int) and 'cloud' (float).

    Side effects: writes 'test.json' in the working directory and opens it
    in geojson.io.
    """
    from collections import OrderedDict
    # Property types here must match the values actually written below.
    cover_schema = {
        'geometry': 'Polygon',
        'properties': OrderedDict([
            ('id', 'str'),
            ('res', 'int'),
            ('cloud', 'float')
        ])
    }
    from fiona.crs import from_epsg
    cover_crs = from_epsg(4326)  # WGS84 lon/lat
    output_driver = "GeoJSON"
    print("Begin Write Results!")
    with fiona.open(
            'test.json', 'w',
            driver=output_driver,
            crs=cover_crs,
            schema=cover_schema) as f:
        # Iterate the samples directly instead of indexing by range(len()).
        for sample in cover_result:
            f.write({
                'geometry': mapping(sample['geom']),
                'properties': OrderedDict([
                    ('id', sample['id']),
                    # BUG FIX: 'res' was written as str(sample['res']),
                    # contradicting the 'int' type declared in cover_schema.
                    ('res', int(sample['res'])),
                    ('cloud', sample['cloud'])
                ])
            })
    # Display the written file in geojson.io.
    from geojsonio import display
    with open('test.json') as f:
        contents = f.read()
    display(contents)
def get_EV_map_df():
    """Build and display an interactive geojson.io map of East Village
    buildings joined with PLUTO location data and model predictions."""
    # East Village footprints, plus the prediction CSV trimmed to the
    # columns needed for the join.
    pluto_EV = gpd.read_file('data/geojson/pluto_EV.geojson')
    pred_df = pd.read_csv('data/all_with_predictions_unlogged.csv')
    pred_df = pred_df[['lat', 'lng', 'distance_to_garden', 'predicted']]

    # Full PLUTO extract from SQL with explicit column names.
    pluto_columns = [
        'lot_id', 'block', 'lot', 'cd', 'zipcode', 'address', 'zonedist1',
        'schooldist', 'splitzone', 'bldgclass', 'landuse', 'ownername',
        'lotarea', 'lottype', 'numfloors', 'unitsres', 'yearbuilt',
        'yearalter1', 'yearalter2', 'histdist', 'landmark', 'builtfar',
        'residfar', 'lat', 'lng',
    ]
    location_df = pd.DataFrame(SQL_functions.get_PLUTO(),
                               columns=pluto_columns)
    location_df = location_df.set_index('lot_id')

    # Keep only the join keys / coordinates, coerced to usable dtypes.
    location_df = location_df[['block', 'lot', 'lat', 'lng']]
    for column, dtype in (('block', 'int64'), ('lot', 'int64'),
                          ('lat', 'float'), ('lng', 'float')):
        location_df = location_df.astype({column: dtype})

    # Attach coordinates, then predictions; drop rows missing either.
    pluto_EV = pluto_EV.merge(location_df, how='left', on=['block', 'lot'])
    pluto_EV = pluto_EV.merge(pred_df, how='left', on=['lat', 'lng'])
    pluto_EV = pluto_EV.dropna()

    # Serialize and open in the browser.
    geojsonio.display(pluto_EV.to_json())
def parse_page(session, search_url, map_footprint):
    """Fetch one page of search results, record each feature's metadata in
    the module-level result lists, and recurse to the next page if any.

    Args:
        session: requests session used for the paged search API.
        search_url: URL of the results page to fetch.
        map_footprint: when truthy, also push the raw page to geojson.io.
    """
    res = session.get(search_url)
    if map_footprint:
        # Display is a side effect only; the returned URL was never used.
        geojsonio.display(res.text)
    page = res.json()
    for feature in page['features']:
        # Renamed from `id` to avoid shadowing the builtin.
        feature_id = feature['id']
        cc = feature['properties']['cloud_cover']
        it = feature['properties']['item_type']
        au = feature['_links']['assets']
        # Accumulate into the module-level result lists.
        ids.append(feature_id)
        cloud_cover.append(cc)
        item_type.append(it)
        assets_url.append(au)
    # Follow pagination until the API stops returning a next link.
    next_url = page["_links"].get("_next")
    if next_url:
        parse_page(session, next_url, map_footprint)
# NOTE(review): this chunk starts mid-script — `buildingArea`, `footPrintJS`
# and `buildingJS` are defined earlier in the (unseen) file.
buildingHeight = []
buildingWWR = []
buildingName = []
# Read geojson file and reproject to the WGS84 coordinate system.
df_fp = gpd.read_file(footPrintJS)
df_fp = df_fp.to_crs(epsg='4326')
# Convert geo dataframe to json
fp = df_fp.to_json()
# Convert json to a Python dict
fp_dict = json.loads(fp)
# Visualize building footprints through geojsonio
geojsonio.display(fp)
# Extract the footprint area of each building feature.
for i in range(len(fp_dict['features'])):
    # Calculate building area using the Python `area` package.
    ar = area(fp_dict['features'][i]['geometry'])
    buildingArea.append(ar)
# Pull per-building attributes from the building JSON description.
with open(buildingJS) as bldg:
    bd_dict = json.load(bldg)
for i in range(len(bd_dict['buildings'])):
    ht = bd_dict['buildings'][i]['height']
    wwr = bd_dict['buildings'][i]['window-to-wall ratio']
    name = bd_dict['buildings'][i]['name']
    # NOTE(review): only `ht` is appended in this view; `wwr`/`name` are
    # presumably appended just past the end of this chunk.
    buildingHeight.append(ht)
# NOTE(review): this chunk starts mid-expression — the opening of the
# coordinate list (a Polygon constructor) lies above this view.
    ), (
        -45.887489318847656,
        -23.20979613466111
    ), (
        -45.87787628173828,
        -23.2315664097847
    ), (
        -45.85075378417969,
        -23.228096024018107
    ), (
        -45.86620330810547,
        -23.19244047577093
    )])
# Load the coordinates into a GeoDataFrame alongside the point p1.
states = gpd.GeoDataFrame(geometry=[p1, polygon])
# Alternative: load the coordinates from a .geojson file instead
# states = gpd.read_file('sjcinpe.geojson')
states.loc[0, 'name'] = 'INPE'
states.loc[0, 'marker-color'] = '#48ff00'
print(states.head())
states = states.to_json()
geojsonio.display(states)
def get_closest(self):
    """Rebuild the Coordinates column from Long/Lat and show the GeoDataFrame."""
    points = []
    for pair in zip(self.boba.Long, self.boba.Lat):
        points.append(Point(pair))
    self.boba['Coordinates'] = points
    display(self.get_gdf().to_json())
def isochroner(
        data,
        key,  # mapzen-3iGEB8a
        lat_field,
        lon_field,
        id_field=None,
        travel_type='pedestrian',
        polygons='true',
        travel_time=10,
        to_geojsonio=False):
    """Generate isochrones for a CSV of locations via the Mapzen API.

    Args:
        data: path to a CSV containing one row per origin point.
        key: Mapzen API key.
        lat_field / lon_field: CSV column names for latitude / longitude.
        id_field: when not None, each request is tagged with the row index.
        travel_type: 'bicycle', 'pedestrian' or 'multimodal'.
        polygons: 'true' to request polygon contours rather than lines.
        travel_time: contour time in minutes.
        to_geojsonio: when True, also open the result in geojson.io.

    Side effects: writes 'isochrones_<timestamp>.geojson' and sleeps 1s
    between API calls as crude rate limiting.
    """
    # BUG FIX: the original only flagged an invalid travel_type and then fell
    # through to query the API anyway (the `while valid` flag never stopped
    # the current iteration); abort immediately instead.
    travel_types = ['bicycle', 'pedestrian', 'multimodal']
    if travel_type not in travel_types:
        print(travel_type, "is not a valid input for travel_type.")
        print('Select from', travel_types)
        return

    # Read in the CSV of locations from which to generate isochrones.
    df = pd.read_csv(data)

    isochrone_list = []
    i = 0
    while i < len(df):
        if id_field is not None:
            id_field = i  # tag each request with the row index
        val = df.iloc[i, ]
        query_dict = {
            "locations": [{
                "lat": val[lat_field],
                "lon": val[lon_field]
            }],
            "costing": travel_type,
            "contours": [{
                "time": travel_time
            }],
            "polygons": polygons,
        }
        # The API expects compact JSON with double quotes inside the URL.
        query_str = str(query_dict).strip().replace(" ", "").replace("'", "\"")
        payload = {"id": id_field, "api_key": key}
        api_call = 'http://matrix.mapzen.com/isochrone?json=' + query_str
        response = requests.get(api_call, params=payload)
        print("Response URL:", response.url)
        response_json = response.json()
        # Keep only the first (single-contour) feature of each response.
        isochrone = response_json["features"][0]
        isochrone_list.append(isochrone)
        i += 1
        time.sleep(1)  # stay under the API rate limit

    isochrones = {"type": "FeatureCollection", "features": isochrone_list}
    if to_geojsonio:
        geojsonio.display(json.dumps(isochrones))

    # Save isochrones to a timestamped GeoJSON file.
    current_time = datetime.now().strftime('%Y%m%d%H%M%S')
    with open('isochrones_{}.geojson'.format(current_time), 'w') as outfile:
        json.dump(isochrones, outfile)
def mapa(nombre):
    """Draw maps for the presentation: open each named GeoJSON file in geojson.io."""
    for filename in nombre:
        with open(filename) as handle:
            contenido = handle.read()
        display(contenido)
# NOTE(review): this chunk starts mid-call — the tail of a
# parser.add_argument(...) whose opening lies above this view.
    help='Opens processing output in geojsonio if the operation makes sense'
)
args = parser.parse_args()
output = None
# Combine every polygon in the CSV into one geometry collection and save it.
if args.combine_polygons:
    geometry_collection_of_polygons = combine_all_polygons(args.csvpath)
    save_geojson('geom_collection.geojson', geometry_collection_of_polygons)
    output = geometry_collection_of_polygons
# Report how many zoom-21 slippy tiles the combined area covers.
if args.area:
    projected_polygons = convert_to_slippy_tile_coords(list(
        combine_all_polygons(args.csvpath)), zoom=21)
    print(
        str(
            math.ceil(sum([polygon.area for polygon in projected_polygons])))
        + " total tiles at zoom level " + str(21) +
        " in this multipolygon area!")
    output = projected_polygons
if args.inner:
    calculate_inner_coordinates_from_csvpath(csvpath=args.csvpath, zoom=21)
if args.centroids:
    solardb.compute_centroid_distances()
if args.osm_solar:
    solardb.query_and_persist_osm_solar(
        list(combine_all_polygons(args.csvpath)))
# Only the polygon-producing operations leave something displayable.
if args.geojsonio and output is not None:
    geojsonio.display(geopandas.GeoSeries(output))
def display(self):
    """Open this object's GeoDataFrame in geojson.io."""
    serialized = self.gdf.to_json()
    geojsonio.display(serialized)
def simpleSpatialHome():
    """Load the states GeoJSON, push it to geojson.io, and return a status string."""
    states_frame = gpd.read_file(statesGeoJSON)
    geojsonio.display(states_frame.to_json())
    return 'display States geoJSON file'
def display(dump):
    """Thin wrapper: forward the serialized GeoJSON straight to geojsonio."""
    geojsonio.display(dump)
import kml2geojson
import json
import pandas as pd
import geojsonio

# Convert the KML export into ./export.geojson.
kml2geojson.main.convert('./export.kml', './')

# BUG FIX: geojsonio.display expects the GeoJSON document as a string; the
# original passed the pandas DataFrame produced by pd.read_json, which is
# not a GeoJSON payload. Read the file's text and display that instead.
with open('export.geojson') as f:
    geojsonio.display(f.read())
# initalizes csv with list of bubble tea places to dataframe boba = pd.read_csv(filename) def get_coords(self): # gets latitude and longitudes of each place boba['Lat'] = boba['Address'].apply( geocoder.google).apply(lambda x: x.lat) boba['Longitude'] = boba['Address'].apply( geocoder.google).apply(lambda x: x.lng) # converts lat and long points to coordinate point data type boba['Coordinates'] = [ Point(xy) for xy in zip(boba.Longitude, boba.Lat) ] def get_geo(self): return (list(boba['Coordinates'])) def get_names(self): return (boba['Name']) def get_gdf(self): # coordinate system parameters crs = {'init': 'epsg:4326'} return (GeoDataFrame(get_names(), crs=crs, geometry=get_geo())) #def update(self): if __name__ == "__main__": display(geo_df.to_json())
from geojsonio import display

# Show the Sentinel-1 area-of-interest polygon in geojson.io.
# NOTE: hard-coded absolute Windows path — consider making it configurable.
with open(
    r'C:\Users\ithaca\Documents\Magda\Tool_MIE\SENTINEL-1_TOOL\AOI\AOI.geojson'
) as f:
    contents = f.read()
# FIX: dropped the redundant f.close() — the `with` block already closed
# the file on exit.
display(contents)
def visualize(self, query):
    """Geocode *query*, store it as the sole coordinate, and display the result."""
    latitude, longitude = self.gmaps.address_to_latlng(query)
    # Shapely points take (x=lng, y=lat) ordering.
    self.boba['Coordinates'] = [Point(longitude, latitude)]
    display(self.get_gdf().to_json())
import requests
import json
import geojsonio

# Example data sources:
# http://www.statweb.provincia.tn.it/indicatoristrutturalisubpro/exp.aspx?ntab=Sub_Numero_Incidenti&info=d&fmt=json
# http://www.dati.gov.it/dataset/trasporti-pubblici-sgm-servizio-realtime/resource/6a3e4d4e-65b3-425c-8c1d-9e3e05054898
# http://servicemap.disit.org/WebAppGrafo/api/v1/?selection=43.7756;11.2490&categories=SensorSite;Car_park&maxResults=10&maxDists=0.2&lang=it&format=json

# Query the servicemap API for sensor sites and car parks near a point in
# Florence ('selection' is "lat;lng"; categories are ';'-joined).
payload = {
    'selection': ';'.join([str(43.7756), str(11.2490)]),
    'categories': ';'.join(['SensorSite', 'Car_park']),
    'maxResults': 10,
    'maxDists': 20,
    'lang': 'it',
    'format': 'json',
    # text: 'description',
}
r = requests.get('http://servicemap.disit.org/WebAppGrafo/api/v1/',
                 params=payload)
data = r.json()

# Push only the 'Services' collection to geojson.io.
text = json.dumps(data['Services'])
geojsonio.display(text)

# text = json.dumps(data, indent=4, sort_keys=True)
# Dump the full response to stdout for inspection.
text = json.dumps(data)
print(text)
from geojsonio import display

# Read the district GeoJSON and open it in geojson.io.
with open('crbdist6.geojson') as source:
    geojson_text = source.read()
    display(geojson_text)
# colL1 = [bool(re.search(i,x)) for x in flux1.columns] # flux10 = flux1.loc[:,colL1] # fluxSM.loc[:'exit_' + str(i)] = flux1[ # fluxM.loc[:,colL].head() if False: from matplotlib.patches import Polygon as plPoly fig, ax = plt.subplots(figsize=(8, 8)) mpl_poly = plPoly(np.array(a.exterior), facecolor="g", lw=0, alpha=0.4) ax.add_patch(mpl_poly) ax.relim() ax.autoscale() plt.show() a.plot(ax=ax,color='red'); l.plot(ax=ax, color='green', alpha=0.5); p.plot(ax=ax, color='blue', alpha=0.5); plt.show() print(junct.head()) import geojsonio geojsonio.display(junct.to_json()) world = gpd.read_file(gpd.datasets.get_path('naturalearth_lowres')) cities = gpd.read_file(gpd.datasets.get_path('naturalearth_cities')) world.head() world.plot();
def visualize(self):
    """Serialize the GeoDataFrame and open it in the map viewer."""
    display(self.get_gdf().to_json())
import csv, json
from geojson import Feature, FeatureCollection, Point, LineString
import geopandas as gpd
import geojsonio
import os

# Build one GeoJSON LineString feature per origin/destination pair in the CSV.
features = []
with open(os.path.join("data", "plane_data_fixed.csv"), newline='') as cfile:
    for org_lat, org_long, dest_lat, dest_long, ignore in csv.reader(
            cfile, delimiter=','):
        origin = (float(org_long), float(org_lat))
        destination = (float(dest_long), float(dest_lat))
        features.append(Feature(geometry=LineString([origin, destination])))

# Persist the collection to a GeoJSON file.
collection = FeatureCollection(features)
with open("target_file.geojson", "w") as f:
    f.write('%s' % collection)

# Round-trip through GeoPandas, then open the result in geojson.io.
lines = gpd.read_file('target_file.geojson')
geojsonio.display(lines.to_json())
import route2
import random
import geog
import networkx as nx
import osmgraph
import itertools
import geojsonio
import json

# Generate candidate routes of ~1km over a small OSM graph and display a
# random selection of them in geojson.io.
routeGen = route2.Route2(pref_dist=1000)
routeGen.import_file('../../maps/waterloo_small.osm')
# BUG FIX: the script mixed Python-2 print statements with print() calls,
# which is a syntax error under Python 3; normalized to print().
print('file imported')

start_node = random.choice(list(routeGen.map.nodes()))
print('initial start node: ' + str(start_node))

# routeGen.setup_initial_pool(start_node)
# Keep searching until the DFS yields at least one candidate route.
while routeGen.pool == []:
    routeGen.DFS(routeGen.PREF_DIST, start_node, start_node, [], 50)
print(routeGen.pool)

# Display 20 randomly chosen routes from the pool.
for i in range(0, 20):
    route = random.choice(routeGen.pool)
    coords = osmgraph.tools.coordinates(routeGen.map, route)
    geojsonio.display(json.dumps({'type': 'LineString',
                                  'coordinates': coords}))
# NOTE(review): chunk starts mid-function — the `def` owning this `return`
# lies above this view; what follows reads like notebook scratch cells.
    return df

df = get_names(nome)
str(df['res'])[5:]

import geopandas as gpd
from shapely.geometry import Point, Polygon

p1 = Point(0, 0)
p2 = {"type": "Point", "coordinates": [-73.9617, 40.8067]}
print(p1)
polygon = Polygon([(38, 38), (40, 40), (47, 47)])

import geojsonio
# NOTE(review): p2 is a plain dict — dicts have no .to_json(), and
# json.loads expects a string, so both of the next two lines would raise
# at runtime; confirm whether this cell was ever meant to execute.
geojsonio.display(p2.to_json())
json.loads(p2)

# IBGE API experiments (note the 'censosss' typo in the first URL).
res = requests.get(
    f'https://servicodados.ibge.gov.br/api/v2/censosss/nomes/guilherme')
res = requests.get(f'https://servicodados.ibge.gov.br/api/v3/agregados')
res1 = pd.json_normalize(json.loads(res.text))
res1.iloc[2][2]
res = requests.get(
    f'http://servicodados.ibge.gov.br/api/v3/malhas/municipios/4205407?formato=application/vnd.geo+json'
)
res.content.decode('utf-8')
gj = json.loads(res.content.decode('utf-8'))
type(gj)
def visualize(self):
    """Refresh the Coordinates column from Longitude/Lat and display the frame."""
    coordinate_points = [
        Point(pair) for pair in zip(self.boba.Longitude, self.boba.Lat)
    ]
    self.boba['Coordinates'] = coordinate_points
    display(self.get_gdf().to_json())
def get_ids(search_filter):
    """POST the given filter to Planet's quick-search endpoint and return
    the raw requests.Response containing the matching items' metadata.

    Requires the PL_API_KEY environment variable; `item_type` is defined
    elsewhere in the file.
    """
    search_endpoint_request = {
        "interval": "day",
        "item_types": [item_type],
        "filter": search_filter
    }
    return requests.post('https://api.planet.com/data/v1/quick-search',
                         auth=HTTPBasicAuth(os.environ['PL_API_KEY'], ''),
                         json=search_endpoint_request)


if __name__ == "__main__":
    import os.path
    from geojsonio import display
    # Save the shapefile as GeoJSON (once) using GeoPandas so we can view it
    # with http://geojson.io. `adm1_pathum_df` is defined elsewhere.
    if not os.path.exists("output.json"):
        adm1_pathum_df.to_file("output.json", driver="GeoJSON")
    # Simplify the geometry before display to keep the payload small.
    adm1_pathum_df.loc[:, 'geometry'] = adm1_pathum_df['geometry'].apply(
        lambda x: x.simplify(0.00005, preserve_topology=True))
    display(adm1_pathum_df.to_json())
    # Smoke-test get_stats with a Pathum Thani date-range filter.
    result = get_stats(
        get_pathum_filter('2019-10-01T09:04:34.167792Z',
                          '2019-09-30T00:00:00.000000Z'))
    print(result)