def gcoos_describe_sensor(r_a, urn):
    """
    Notes: We get all settings from the .cfg file and build the_url.
    Different RAs are running different versions of SOS so the XML parsing
    might need some tweaking. This code is known to work with the GCOOS-RA
    SOS server.
    """
    the_url = CONFIG.get('ra_servers', r_a)
    the_url = the_url + CONFIG.get('base_urls', 'describe_sensor')
    the_url = the_url.replace('[anyURI]', urn)
    if DEBUG:
        print "gcoos_describe_sensor(%s, %s)..." % (r_a, urn)
    the_soup = soup(urllib2.urlopen(the_url).read(), 'html.parser')
    # get position
    the_pos = the_soup.find('gml:pos').contents[0]
    latitude = float(the_pos.split(' ')[0])
    longitude = float(the_pos.split(' ')[1])
    # slurp up the rest of the tasty bits...
    the_org = the_soup.find('sml:organizationname').contents[0]
    the_description = the_soup.find('gml:description').contents[0]
    sensor_list = []
    for sensor in set(the_soup.find_all('sml:output')):
        sensor_list.append(sensor['name'])
    # Get GeoJSON with it...
    my_feature = Feature(geometry=Point((longitude, latitude)))
    my_feature.header = {'Organization': the_org,
                         'Station': urn,
                         'Description': the_description,
                         'Sensors': sensor_list}
    return my_feature
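# A hypothetical usage sketch for the function above. The RA name and station
# URN below are placeholders, and CONFIG, DEBUG, and the geojson module are
# assumed to be set up elsewhere in the surrounding module.
if __name__ == '__main__':
    feature = gcoos_describe_sensor('gcoos', 'urn:ioos:station:gcoos:example')
    # serialize the returned Feature (geojson objects are dict subclasses)
    print geojson.dumps(feature, sort_keys=True)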
def constituency_collection(constituencies):
    features = []
    for constituency in constituencies[:10]:
        feature = Feature(geometry=Polygon(
            constituency_extent(constituency['ons_code'])['coordinates']))
        feature.properties['name'] = constituency['name']
        feature.properties['mp'] = constituency['mp']
        # feature.properties['party'] = constituency['party']
        # feature.properties['url'] = constituency['url']
        feature.properties['signature_count'] = constituency['signature_count']
        features.append(feature)
    feature_collection = FeatureCollection(features)
    return geojson.dumps(feature_collection)
def json_mapas():
    municipios = mongo.db.municipios.find(
        {'properties.NOMEUF': 'SANTA CATARINA'}).limit(50)
    colecao = []
    for mun in municipios:
        f = Feature(properties=mun['properties'], geometry=mun['geometry'])
        f.properties['name'] = mun['properties']['NOME_1']
        colecao.append(f)
    colecao = FeatureCollection(colecao)
    return dumps(colecao)

# { $match : { "nfeProc.NFe.infNFe.dest.enderDest.cMun" : "4217501" } }
# {"$group":{
#     "_id": "$nfeProc.NFe.infNFe.dest.enderDest.cMun",
#     "total":{"$sum": "$nfeProc.NFe.infNFe.total.ICMSTot.vNF" }
#     }
# }
def request(self, mor, env):
    features = []
    props = {
        "id": "player",
        "marker-symbol": "pitch",
        "title": "You",
        "marker-size": "large",
        "marker-color": "663399",
        "type": "player"
    }
    p = Point((mor.PlayerLng, mor.PlayerLat))
    f = Feature(geometry=p, id="player", properties=props)
    features.append(f)
    fc = FeatureCollection(features)
    dump = geojson.dumps(fc, sort_keys=True)
    self._player = dump
    # use a context manager so the file handle is closed and flushed
    with open('ui/player.json', 'w') as f:
        f.write(dump)
def get_boundaries(gvid, sl):
    """ Return a cached, static geojson file of boundaries for a region

    :param gvid: The GVID of the region
    :param sl: The summary level of the subdivisions of the region.
    :return:
    """
    from geojson import Feature, Point, FeatureCollection, dumps
    from shapely.wkt import loads
    from geoid.civick import GVid
    from os.path import join, exists
    from flask import send_from_directory

    cache_dir = aac.library.filesystem.cache("ui/geo")
    fn = "{}-{}.geojson".format(str(gvid), sl)
    fn_path = join(cache_dir, fn)

    if not exists(fn_path):
        p = aac.library.partition("census.gov-tiger-2015-counties")
        features = []
        for i, row in enumerate(p):
            if row.statefp == 6:  # In dev, assume counties in California
                gvid = GVid.parse(row.gvid)
                f = Feature(
                    geometry=loads(row.geometry).simplify(0.01),
                    properties={
                        "gvid": row.gvid,
                        "state": gvid.state,
                        "county": gvid.county,
                        "count_name": row.name
                    },
                )
                features.append(f)
        fc = FeatureCollection(features)
        with open(fn_path, "w") as f:
            f.write(dumps(fc))

    return send_from_directory(cache_dir, fn, as_attachment=False,
                               mimetype="application/vnd.geo+json")
def _safe_groups_as_geojson(self, sum_interactions):
    _hot_data = {
        'red': ((0., 0.0416, 0.0416),
                (0.365079, 1.000000, 1.000000),
                (1.0, 1.0, 1.0)),
        'green': ((0., 0., 0.),
                  (0.365079, 0.000000, 0.000000),
                  (0.746032, 1.000000, 1.000000),
                  (1.0, 1.0, 1.0)),
        'blue': ((0., 0., 0.),
                 (0.746032, 0.000000, 0.000000),
                 (1.0, 0.6, 0.8))
    }
    third = sum_interactions // 3
    flist = []

    def clamp(x):
        return int(max(0, min(x * 255, 255)))

    colormap = LinearSegmentedColormap("hot_tweak", _hot_data)
    for idx, g in enumerate(self.location_groups):
        c = g.centroid.as_deg()
        feature = Feature(geometry=Point(coordinates=(c.long, c.lat)))
        size = 'medium'
        if g.interaction_count >= third * 2:
            size = 'large'
        elif g.interaction_count < third:
            size = 'small'
        feature.properties['marker-size'] = size
        feature.properties['title'] = '{} interactions'.format(g.interaction_count)
        c = colormap(g.interaction_count / sum_interactions)
        color = "#{0:02x}{1:02x}{2:02x}".format(clamp(c[0]), clamp(c[1]), clamp(c[2]))
        feature.properties['marker-color'] = color
        flist.append(feature)
    featurecollection = FeatureCollection(features=flist)
    with open(os.path.join(self.output_folder, self.GEOJSON_FILENAME), 'w') as f:
        f.write(geojson.dumps(featurecollection))
def response(context, flow):
    with decoded(flow.response):
        if flow.match("~d pgorelease.nianticlabs.com"):
            env = RpcResponseEnvelopeProto()
            env.ParseFromString(flow.response.content)
            key = associate[env.response_id]
            value = env.returns[0]

            if key == GET_MAP_OBJECTS:
                mor = MapObjectsResponse()
                mor.ParseFromString(value)
                print("GET_MAP_OBJECTS %i tiles" % len(mor.tiles))
                features = []
                for tile in mor.tiles:
                    print("S2 Cell %i" % tile.id)
                    for fort in tile.forts:
                        p = Point((fort.longitude, fort.latitude))
                        f = Feature(geometry=p, id=len(features),
                                    properties={"id": fort.id,
                                                "tile": tile.id,
                                                "type": "fort",
                                                "marker-color": "0000FF"})
                        features.append(f)
                    for fort in tile.location4:
                        p = Point((fort.longitude, fort.latitude))
                        f = Feature(geometry=p, id=len(features),
                                    properties={"tile": tile.id,
                                                "type": "location4",
                                                "marker-color": "FFFF00"})
                        features.append(f)
                    for fort in tile.location9:
                        p = Point((fort.longitude, fort.latitude))
                        f = Feature(geometry=p, id=len(features),
                                    properties={"tile": tile.id,
                                                "type": "location9",
                                                "marker-color": "00FFFF"})
                        features.append(f)
                    for fort in tile.close_pokemon_a:
                        p = Point((fort.longitude, fort.latitude))
                        f = Feature(geometry=p, id=len(features),
                                    properties={"id": fort.uid,
                                                "tile": tile.id,
                                                "type": "close_pokemon_a",
                                                "marker-color": "FF0000"})
                        features.append(f)
                    for fort in tile.close_pokemon_b:
                        p = Point((fort.longitude, fort.latitude))
                        f = Feature(geometry=p, id=len(features),
                                    properties={"id": fort.uid,
                                                "tile": tile.id,
                                                "type": "close_pokemon_b",
                                                "marker-color": "00FF00"})
                        features.append(f)
                fc = FeatureCollection(features)
                dump = geojson.dumps(fc, sort_keys=True)
                # use a context manager so the file handle is closed and flushed
                with open('get_map_objects.json', 'w') as f:
                    f.write(dump)
            elif key == FORT_DETAILS:
                mor = FortDetailsOutProto()
                mor.ParseFromString(value)
                print(mor)
            elif key == FORT_SEARCH:
                mor = FortSearchOutProto()
                mor.ParseFromString(value)
                print(mor)
            else:
                print("API: %s" % key)
def request(context, flow):
    if flow.match("~d pgorelease.nianticlabs.com"):
        env = RpcRequestEnvelopeProto()
        env.ParseFromString(flow.request.content)
        if len(env.parameter) == 0:
            print 'Failed - empty request parameters'
            return
        key = env.parameter[0].key
        value = env.parameter[0].value
        request_api[env.request_id] = key
        request_location[env.request_id] = (env.lat, env.long)
        name = Method.Name(key)
        name = mismatched_apis.get(name, name)  # return class name when not the same as method
        klass = underscore_to_camelcase(name) + "Proto"
        try:
            mor = deserialize(value, "." + klass)
            print("Deserialized Request %s" % name)
        except:
            print("Missing Request API: %s" % name)

        if key == GET_MAP_OBJECTS:
            features = []
            props = {
                "id": "player",
                "marker-symbol": "pitch",
                "title": "You",
                "marker-size": "large",
                "marker-color": "663399",
                "type": "player"
            }
            p = Point((mor.PlayerLng, mor.PlayerLat))
            f = Feature(geometry=p, id="player", properties=props)
            features.append(f)
            fc = FeatureCollection(features)
            dump = geojson.dumps(fc, sort_keys=True)
            # use a context manager so the file handle is closed and flushed
            with open('ui/player.json', 'w') as f:
                f.write(dump)
def TraceNetwork(lon, lat, directionSQL):
    # Raw SQL statement, used since pgRouting doesn't have SQLAlchemy ORM support
    # application.logger.debug(directionSQL)
    sql = ("""
    SELECT node.id as id, node.the_geom as geom, GeometryType(node.the_geom) as geomtype
    INTO TEMP TABLE sp
    FROM storm_network_vertices_pgr as node
    WHERE st_intersects(ST_Snap(ST_Transform(ST_SetSRID(ST_Point(:lon, :lat), 4326),2229),node.the_geom, 500), node.the_geom)
    ORDER BY node.the_geom <-> ST_Transform(ST_SetSRID(ST_Point(:lon, :lat), 4326),2229)
    LIMIT 1;

    SELECT sp.id, nodes.*
    INTO TEMP TABLE traceresults
    FROM sp, pgr_drivingDistance(:directionSQL, sp.id, 999999, true) AS nodes;

    SELECT mh.uuid, st_asgeojson(st_transform(mh.geom, 4326)) as geojson, mh.factype as factype, tr.cost as cost, dwgno, NULL as size, facid as facid, CAST(material as text) as material, CAST(stnd_plan as text) as subtype, GeometryType(mh.geom) as geomtype
    FROM maintenanceholes mh, traceresults as tr
    WHERE (mh.node_fk = tr.node)
    UNION
    SELECT i.uuid, st_asgeojson(st_transform(i.geom, 4326)) as geojson, i.factype as factype, tr.cost as cost, dwgno, NULL as size, facid as facid, NULL as material, CAST(stnd_plan as text) as subtype, GeometryType(i.geom) as geomtype
    FROM inlets i, traceresults as tr
    WHERE (i.node_fk = tr.node)
    UNION
    SELECT ol.uuid, st_asgeojson(st_transform(ol.geom, 4326)) as geojson, ol.factype as factype, tr.cost as cost, dwgno, CAST(diameter_h as text) as size, outfall_id as facid, CAST(material as text) as material, CAST(cross_sect as text) as subtype, GeometryType(ol.geom) as geomtype
    FROM outlets ol, traceresults as tr
    WHERE (ol.node_fk = tr.node)
    UNION
    SELECT gm.uuid, st_asgeojson(st_transform(gm.geom, 4326)) as geojson, gm.factype as factype, tr.cost as cost, NULL as dwgno, CAST(diameter_h as text) as size, facid as facid, CAST(material as text) as material, CAST(subtype as text) as subtype, GeometryType(gm.geom) as geomtype
    FROM gravitymains gm, traceresults as tr
    WHERE (gm.edge_fk = tr.edge)
    UNION
    SELECT l.uuid, st_asgeojson(st_transform(l.geom, 4326)) as geojson, l.factype as factype, tr.cost as cost, dwgno, CAST(diameter_height as text) as size, facid as facid, CAST(material as text) as material, CAST(subtype as text) as subtype, GeometryType(l.geom) as geomtype
    FROM laterals l, traceresults as tr
    WHERE (l.edge_fk = tr.edge)
    UNION
    SELECT NULL as uuid, st_asgeojson(st_transform(sp.geom, 4326)) as geojson, 'startpoint' as factype, '0' as cost, NULL as dwgno, NULL as size, NULL as facid, NULL as material, NULL as subtype, GeometryType(sp.geom) as geomtype
    FROM sp
    """)
    # application.logger.debug(sql)

    # Lists to hold results
    # results = db.session.execute(sql, {"lat": lat, "lon": lon, "directionSQL": directionSQL})
    # results = lacotraceSes.execute(sql, {"lat": lat, "lon": lon, "directionSQL": directionSQL})
    with LacotraceSes() as session:
        results = session.execute(sql, {"lat": lat, "lon": lon,
                                        "directionSQL": directionSQL})

    # if returnType == "geojson":
    resultDict = {'startpoint': [], "Inlets": [], "Outlets": [],
                  "Maintenance Holes": [], "Gravity Mains": [], "Laterals": []}

    # Execute raw SQL query with parameters
    # startpoint = None
    id = 1
    for i in results:
        # Load st_asgeojson query results as geojson data
        geojsonGeom = geojson.loads(i.geojson)
        # Populate properties for each feature
        propDict = {}
        propDict['factype'] = i.factype
        if i.size:
            propDict['size_in'] = i.size
        else:
            propDict['size_in'] = "Unknown"
        if i.dwgno:
            propDict['dwgno'] = i.dwgno
        else:
            propDict['dwgno'] = "Unknown"
        # if i.material:
        #     propDict['material'] = i.material
        # else:
        #     propDict['material'] = "Unknown"
        propDict['id'] = id
        if not i.facid:
            propDict['facid'] = "Unknown"
        else:
            propDict['facid'] = i.facid
        propDict['pipelength_ft'] = i.cost
        propDict['uuid'] = i.uuid
        propDict['facsubtype'] = "Unknown"
        propDict['material'] = "Unknown"

        if i.factype == "Inlets":
            propDict['facsubtype'] = DomainLookUps.inletPlanLookUp(str(i.subtype))
            resultDict['Inlets'].append(
                Feature(geometry=Point(geojsonGeom), properties=propDict))
        elif i.factype == "Outlets":
            propDict['material'] = DomainLookUps.gravityMainsMaterialLookup(str(i.material))
            resultDict['Outlets'].append(
                Feature(geometry=Point(geojsonGeom), properties=propDict))
        elif i.factype == "Maintenance Holes":
            propDict['facsubtype'] = DomainLookUps.maintenanceHolePlanLookUp(str(i.subtype))
            resultDict['Maintenance Holes'].append(
                Feature(geometry=Point(geojsonGeom), properties=propDict))
        elif i.factype == "Gravity Mains":
            propDict['material'] = DomainLookUps.gravityMainsMaterialLookup(str(i.material))
            resultDict['Gravity Mains'].append(
                Feature(geometry=MultiLineString(geojsonGeom), properties=propDict))
        elif i.factype == "Laterals":
            propDict['material'] = DomainLookUps.gravityMainsMaterialLookup(str(i.material))
            resultDict['Laterals'].append(
                Feature(geometry=MultiLineString(geojsonGeom), properties=propDict))
        elif i.factype == "startpoint":
            resultDict['startpoint'].append(
                Feature(geometry=Point(geojsonGeom), properties=propDict))
        else:
            propDict['facsubtype'] = i.subtype
        id += 1
    return resultDict
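# A hypothetical usage sketch for TraceNetwork. The coordinates and edge SQL
# below are placeholders; each per-layer list in the returned dict can be
# wrapped into its own FeatureCollection for the client.
direction_sql = 'SELECT id, source, target, cost FROM storm_network'  # placeholder
trace = TraceNetwork(-118.24, 34.05, direction_sql)
layers = {name: FeatureCollection(feats) for name, feats in trace.items()}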
            (SELECT DISTINCT stops.parent_station
             FROM trips
             INNER JOIN stop_times ON stop_times.trip_id = trips.trip_id
             INNER JOIN stops ON stops.stop_id = stop_times.stop_id
             WHERE route_id = %s)
        ) AND stops.location_type='1'"""
c.execute(sql, (route_id, ))
stops = c.fetchall()
list_stops = []
geo_stops = []
for stop in stops:
    geo_stops.append(
        Feature(geometry=Point((float(stop['stop_lon']), float(stop['stop_lat']))),
                properties={'name': stop['stop_name'],
                            'stop_id': stop['stop_id']}))
    list_stops.append({
        'name': stop['stop_name'],
        'stop_id': stop['stop_id'],
        'lat': float(stop['stop_lat']),
        'lon': float(stop['stop_lon'])
    })
collection_stops = FeatureCollection(geo_stops)
df_stops = pd.DataFrame(list_stops)
df_stops.to_csv("{}.csv".format(args.route), index=False)
geotiff_image = img_path.replace(img_extension, '.tif')
translate_coords = GeoTiffProcessor.get_multi_polygon_axis_point_coordinates(
    geotiff_image, ctr_points, {'debug': False})
final_coords = []
geo_features = []
for poly in translate_coords['coords']:
    poly_coords = []
    poly_geo_coords = []
    for cr in poly:
        poly_coords.append({'x': cr['x'], 'y': cr['y'],
                            'latitude': cr['lat'], 'longitude': cr['long']})
        poly_geo_coords.append((cr['long'], cr['lat']))
    # add final closing point
    poly_geo_coords.append((poly[0]['long'], poly[0]['lat']))
    final_coords.append(poly_coords)
    geo_feature = Feature(geometry=Polygon([poly_geo_coords], precision=15))
    geo_features.append(geo_feature)
geo_feature_collection = FeatureCollection(geo_features)
geo_feature_collection_dump = geojson_dumps(geo_feature_collection, sort_keys=True)

# new_ctrs = []
# new_ctrs_debug = []
# for cidx in range(len(contours)):
#     contour = contours[cidx]
#     peri = cv2.arcLength(contour, True)
#     approx = cv2.approxPolyDP(contour, 0.04 * peri, True)
#     new_ctrs_debug.append({
#         'peri': peri,
#         'approx': approx,
        prop[classes[i]]['sdi'] = sdi
        prop[classes[i]]['type'] = "probability"

    i = 0
    for key in prop:
        pol = Polygon(polygons[key])
        if no_feature_collection is True:
            result = dumps({
                'type': 'Feature',
                'geometry': pol,
                "properties": prop[key]
            })
            print_result(args.output, result)
            if i < len(prop) - 1:
                print_result(args.output, ",")
        else:
            features.append(Feature(geometry=pol, properties=prop[key]))
        i = i + 1

    if y is not None and no_feature_collection is False:
        prop = {}
        polygon = {}
        for ps in psl:
            ps = str(ps)
            if ps in y:
                coord = y[ps][0].split("_")
                label = y[ps][0]
                polygon[label] = get_polygon(int(coord[1]), int(coord[0]),
                                             float(gp['cx']), float(gp['cy']))
                try:
x2wgs, y2wgs = transform(inProj, outProj, x2rd, y2rd)
features = []
for entity in feed.entity:
    if x1wgs < entity.vehicle.position.longitude and entity.vehicle.position.longitude < x2wgs:
        if y1wgs < entity.vehicle.position.latitude and entity.vehicle.position.latitude < y2wgs:
            props = entity.id.split(":")
            route_data = routesearch(entity.vehicle.trip.route_id, routes)
            if len(route_data) > 0:
                route_data = route_data[0]
            else:
                route_data = {}
                route_data['short_name'] = entity.vehicle.trip.route_id
                route_data['long_name'] = 'Unknown'
            feature = Feature(
                geometry=Point((entity.vehicle.position.longitude,
                                entity.vehicle.position.latitude)),
                properties={
                    "trip_id": entity.vehicle.trip.trip_id,
                    "company": props[1],
                    "route_id": entity.vehicle.trip.route_id,
                    "route_code": route_data['short_name'],
                    "route_name": route_data['long_name'],
                    "vehicle_id": entity.vehicle.vehicle.label,
                    "timestamp": None
                },
                id=entity.vehicle.vehicle.label)
            features.append(feature)
feature_collection = FeatureCollection(features)
print(geojson.dumps(feature_collection))
c.execute('select gid, site, address, ST_X(geom_datum) as lon, ST_Y(geom_datum) as lat from philly_rec;')
rec = c.fetchall()
features = []
for play in rec:
    props = {
        'structured': {'placeholder': 1},
        'label': play['site'],
        'description': play['address']
    }
    feature = Feature(id=play['gid'],
                      geometry=Point((float(play['lon']), float(play['lat']))),
                      properties=props)
    features.append(feature)
c.close()
conn.close()
props = {
    'id': 'recreation',
    'label': 'Philadelphia recreational facilities',
    'description': 'Philadelphia dataset for rec locations.',
    'schema': {
        'placeholder': {
            'label': 'placeholder metric (always 1)'
        }
def transform(self, value):
    geometry = loads(value['geom'])
    return Feature(geometry=geometry)
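# A minimal sketch of how this transform might be exercised, assuming `loads`
# is shapely.wkt.loads and value['geom'] carries WKT; the record is made up.
from shapely.wkt import loads
from shapely.geometry import mapping
from geojson import Feature

value = {'geom': 'POINT (4.9 52.4)'}  # hypothetical input record
# mapping() converts the shapely geometry to a GeoJSON-style dict
feature = Feature(geometry=mapping(loads(value['geom'])))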
def get(self, massif_id: UUID = None) -> Dict:
    with connection_scope() as con:
        # aliases of tables for better reading
        br = BraRecordTable.alias("br")
        m = MassifTable.alias("m")
        d = DepartmentTable.alias("d")

        # Here we want, for each massif, the last BRA associated.
        # This requires some advanced querying (lateral join).
        # The downside is that if your db has no BRA loaded,
        # then this endpoint will return an empty geojson.

        # columns we want in the lateral
        lateral_column = [
            m.c.m_id,
            m.c.m_name,
            m.c.the_geom,
            br.c.br_id,
            br.c.br_production_date,
            br.c.br_expiration_date,
            br.c.br_is_amended,
            br.c.br_max_risk,
            br.c.br_risk_comment,
            br.c.br_dangerous_slopes,
            br.c.br_opinion,
            br.c.br_snow_quality,
            br.c.br_snow_stability,
            br.c.br_last_snowfall_date,
            br.c.br_snowlimit_south,
            br.c.br_snowlimit_north,
        ]
        # select the records by production date (desc means latest).
        lateral = (select(lateral_column)
                   .where(m.c.m_id == br.c.br_massif)
                   .order_by(br.c.br_production_date.desc())
                   .limit(1)
                   .lateral())
        # Also select whether it has a previous and a next BRA
        next_bra = (select([br.c.br_id.label("next_bra_id")])
                    .where(and_(
                        br.c.br_massif == m.c.m_id,
                        br.c.br_production_date > lateral.c.br_production_date,
                    ))
                    .order_by(br.c.br_production_date.desc())
                    .limit(1)
                    .lateral())
        previous_bra = (select([br.c.br_id.label("previous_bra_id")])
                        .where(and_(
                            br.c.br_massif == m.c.m_id,
                            br.c.br_production_date < lateral.c.br_production_date,
                        ))
                        .order_by(br.c.br_production_date.desc())
                        .limit(1)
                        .lateral())
        # selecting everything wrapped up. Also joining on department
        query = (select([
                     lateral,
                     d.c.d_id,
                     d.c.d_name,
                     d.c.d_number,
                     next_bra,
                     previous_bra,
                 ])
                 .select_from(
                     m.join(lateral, true())
                      .join(d, d.c.d_id == m.c.m_department)
                      .outerjoin(next_bra, true())
                      .outerjoin(previous_bra, true()))
                 .order_by(m.c.m_id, lateral.c.br_production_date.desc()))
        # filter by massif, if any
        if massif_id:
            query = query.where(m.c.m_id == massif_id)
        lateral_results = con.execute(query).fetchall()
        # transform into json
        results_in_json = marshal(lateral_results, massifs_model)
        # print(f"{results_in_json[0].previous_bra_id=}")
        # print(f"{results_in_json[0].next_bra_id=}")
        features = list()
        for i, result in enumerate(results_in_json):
            risks = con.execute(
                RiskTable.select(
                    RiskTable.c.r_record_id == result["latest_record"]["id"])
            ).fetchall()
            result["latest_record"]["risks"] = [{
                "id": r.r_id,
                "risk": r.r_risk,
                "altitude": r.r_altitude_limit,
            } for r in risks]
            features.append(
                Feature(
                    geometry=GeometryField().format(lateral_results[i].the_geom),
                    properties=result,
                ))
        if len(features) == 1:
            return jsonify(features[0])
        return jsonify(FeatureCollection(features))
point_voronoi_list = []
feature_list = []
for region in range(len(vor.regions) - 1):
    # for region in range(9):
    vertex_list = []
    for x in vor.regions[region]:
        # Not sure how to map the "infinite" point, so leave off those regions for now:
        if x == -1:
            break
        else:
            # Get the vertex out of the list, and flip the order for folium:
            vertex = vor.vertices[x]
            vertex = (vertex[1], vertex[0])
            vertex_list.append(vertex)
    # Save the vertex list as a polygon and then add to the feature_list:
    polygon = Polygon([vertex_list])
    feature = Feature(geometry=polygon, properties={})
    feature_list.append(feature)

# Write the features to the new file:
feature_collection = FeatureCollection(feature_list)
print(feature_collection, file=vorJSON)
vorJSON.close()

# Add the voronoi layer to the map:
mapLibrary.choropleth(geo_data='SwyVor2.json', fill_color="BuPu",
                      fill_opacity=0.01, line_opacity=0.5)
mapLibrary.save(outfile='Subway_NoLtrain.html')
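# A hedged alternative for the "infinite" regions noted above, assuming
# scipy.spatial.Voronoi: skip any region containing vertex index -1 entirely,
# rather than truncating its vertex list with break (which can emit a
# partially-built polygon).
for region in vor.regions:
    if not region or -1 in region:
        continue  # unbounded region; it cannot form a closed polygon
    vertex_list = [(vor.vertices[x][1], vor.vertices[x][0]) for x in region]
    feature_list.append(Feature(geometry=Polygon([vertex_list]), properties={}))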
def connect(separated_path, outputpath, fpnumber, fpnumberr, window_coords,
            wall_coords, window_polygon, new_pair):
    filename = separated_path + fpnumber
    img = cv2.imread(filename + '_merged.png')
    affine_m = [1, 0, 0, -1, 0, img.shape[0]]

    window_point = []
    # change window_coords to shapely Points
    for i in range(len(window_coords)):
        window_x = window_coords[i][0]
        window_y = window_coords[i][1]
        a = point(window_x, window_y)
        apoint = af(a, affine_m)
        window_point.append(apoint)  # calibrate coordinates using affine matrix
    # window_point = shapely points of window endpoints

    wall_point = []
    for i in range(len(wall_coords)):
        wall_x = wall_coords[i][0]
        wall_y = wall_coords[i][1]
        apoint = af(point(wall_x, wall_y), affine_m)
        wall_point.append(apoint)

    wdict = dict()
    for i in range(len(window_polygon)):
        wlist = []
        for j in range(len(window_point)):
            if window_polygon[i].contains(window_point[j]) == True:
                wlist.append(j)
        wdict[i] = wlist
    # wdict = window corners in the same window polygon

    def vis(window_polygon, window_point, wall_point):
        window_polygon = gpd.GeoDataFrame(window_polygon, columns=['geometry'])
        window_point = gpd.GeoDataFrame(window_point, columns=['geometry'])
        wall_point = gpd.GeoDataFrame(wall_point, columns=['geometry'])
        window_polygon.to_file(outputpath + fpnumber + '_window_polys.shp')
        window_point.to_file(outputpath + fpnumber + '_window_corner.shp')
        wall_point.to_file(outputpath + fpnumber + '_wall_corner.shp')

    vis(window_polygon, window_point, wall_point)

    close = dict()  # the part that closes up the shapes
    for i in range(len(wdict)):
        av = []
        for j in range(len(wdict[i])):
            idx = wdict[i][j]  # index of the window corner
            crd = window_coords[idx]  # coordinates at that index
            for k in range(len(new_pair)):
                # new_pair entries are [original coord, changed coord, adjacent wall, distance]
                if crd == new_pair[k][0]:  # if it matches an original coordinate,
                    crd_n = new_pair[k][1]  # replace it with the changed coordinate
                    av.append(crd_n)
        close[i] = av  # group the coords (snapped to adjacent walls) by window index

    window_line = []
    for i in range(len(close)):
        sng = []
        for j in range(len(close[i])):
            ax = close[i][j][0]
            ay = close[i][j][1]
            a = point(ax, ay)
            apoint = af(a, affine_m)
            sng.append(apoint)
        if len(sng) > 1 and len(sng) < 9:
            line = string(sng)
            window_line.append(line)

    window_gj = []  # save window_line as geojson
    for i in range(len(window_line)):
        window_gj.append(Feature(geometry=window_line[i], properties={}))
    window = FeatureCollection(window_gj)
    with open(outputpath + fpnumber + '_window_line.geojson', 'w') as f:
        dump(window, f)

    with open(outputpath + fpnumber + '_wall.geojson') as f:
        wall_v = gj.load(f)
    wall_vecs = [wall_v['features'][i]['geometry']
                 for i in range(len(wall_v['features']))]  # geojson obj
    wall_vecs = [shape(wall_vecs[i]) for i in range(len(wall_vecs))]  # shapely obj
    wall_lines = sp.ops.linemerge(wall_vecs)
    # wall_poly = sp.ops.polygonize(wall_lines)

    ksk = []
    for i in range(len(window_line)):
        kiki = []
        for j in range(len(wall_lines)):  # this part probably needs fixing
            # snap the window line onto the wall line
            new_window_line = snap(window_line[i], wall_lines[j], tolerance=2)
            if window_line[i] == new_window_line:
                continue
            else:
                kiki.append(new_window_line)
        ksk.append(kiki)

    new_window_gj = []  # the closed-up windows
    for i in range(len(ksk)):
        for j in range(len(ksk[i])):
            new_window_gj.append(Feature(geometry=ksk[i][j], properties={}))
    new_window = FeatureCollection(new_window_gj)
    with open(outputpath + fpnumber + '_window_new_line.geojson', 'w') as f:
        dump(new_window, f)
    return window_gj
def main():
    freeze_support()
    args = param_parser()

    intputfolder = args.input_folder.replace('\\', '/')
    outputfolder = AtlassGen.makedir(args.output_dir.replace('\\', '/'))
    tilelayoutfile = args.file
    filetype = args.filetype
    cores = args.cores
    copy = args.copy
    move = args.move
    batches = args.batches
    gen_block = args.gen_block
    block_size = int(args.block_size)
    ffile = args.txtfile
    usetxtfile = args.usetxtfile
    tilesize = args.tilesize
    tasks = {}

    tl_in = AtlassTileLayout()
    if usetxtfile:
        lines = [line.rstrip('\n') for line in open(ffile)]
        modificationTime = time.strftime('%Y-%m-%d %H:%M:%S',
                                         time.localtime(time.time()))
        for i, line in enumerate(lines):
            print(line)
            tilename = line
            x, y = tilename.split('_')
            tl_in.addtile(name=tilename, xmin=float(x), ymin=float(y),
                          xmax=float(x) + tilesize, ymax=float(y) + tilesize,
                          modtime=modificationTime)
    else:
        tl_in.fromjson(tilelayoutfile)

    no_of_tiles = len(tl_in)
    print('\nTotal Number of Files : {0}'.format(no_of_tiles))
    batchlen = math.ceil(no_of_tiles / batches)
    batch = 0

    if gen_block:
        features = []
        blocks = []
        print('\nBlocking started.')
        block_path = os.path.join(outputfolder,
                                  '{0}m_blocks'.format(block_size)).replace('\\', '/')
        for tile in tl_in:
            tilename = tile.name
            xmin = tile.xmin
            xmax = tile.xmax
            ymin = tile.ymin
            ymax = tile.ymax
            tilesize = int(int(xmax) - int(xmin))
            block_x = math.floor(xmin / block_size) * block_size
            block_y = math.floor(ymin / block_size) * block_size
            blockname = '{0}_{1}'.format(block_x, block_y)
            block_folder = os.path.join(block_path, blockname).replace('\\', '/')
            if blockname not in blocks:
                blocks.append(blockname)
                boxcoords = AtlassGen.GETCOORDS([xmin, ymin], tilesize)
                poly = Polygon([[boxcoords[0], boxcoords[1], boxcoords[2],
                                 boxcoords[3], boxcoords[4]]])
            if not os.path.exists(block_folder):
                AtlassGen.makedir(block_folder)
            input = os.path.join(intputfolder,
                                 '{0}.{1}'.format(tilename, filetype)).replace('\\', '/')
            output = os.path.join(block_folder,
                                  '{0}.{1}'.format(tilename, filetype)).replace('\\', '/')
            # print(output)
            # block_task[blockname] = AtlassTask(blockname, movefiles, input, output)
            if copy:
                tasks[tilename] = AtlassTask(tilename, copyfile, input, output)
            elif move:
                tasks[tilename] = AtlassTask(tilename, movefiles, input, output)
            else:
                print("no command selected")

        p = Pool(processes=cores)
        results = p.map(AtlassTaskRunner.taskmanager, tasks.values())

        success = 0
        for result in results:
            if not result.success:
                print('File {0} could Not be copied/moved'.format(result.name))
            else:
                success += 1

        print('No of blocks : {0}'.format(len(blocks)))
        print('\nFiles copied/moved Successfully : {0}'.format(success))

        for block in blocks:
            blockname = block
            block_folder = os.path.join(block_path, blockname).replace('\\', '/')
            lfiles = AtlassGen.FILELIST(['*.{0}'.format(filetype)], block_folder)
            tilelayout = AtlassTileLayout()
            features = []
            for lf in lfiles:
                path, tilename, ext = AtlassGen.FILESPEC(lf)
                xmin, ymin = tilename.split('_')
                xmax = str(int(xmin) + tilesize)
                ymax = str(int(ymin) + tilesize)
                boxcoords = AtlassGen.GETCOORDS([xmin, ymin], tilesize)
                poly = Polygon([[boxcoords[0], boxcoords[1], boxcoords[2],
                                 boxcoords[3], boxcoords[4]]])
                # adding records for json file
                features.append(Feature(geometry=poly,
                                        properties={"name": tilename,
                                                    "xmin": xmin,
                                                    "ymin": ymin,
                                                    "xmax": xmax,
                                                    "ymax": ymax,
                                                    "tilenum": tilename}))
                tilelayout.addtile(name=tilename, xmin=float(xmin), ymin=float(ymin),
                                   xmax=float(xmax), ymax=float(ymax))

            jsonfile = 'TileLayout'
            jsonfile = os.path.join(block_folder,
                                    '{0}_{1}.json'.format(jsonfile, len(features)))
            feature_collection = FeatureCollection(features)
            with open(jsonfile, 'w') as f:
                dump(feature_collection, f)
    else:
        for i, tile in enumerate(tl_in):
            tilename = '{0}.{1}'.format(tile.name, filetype)
            if i % batchlen == 0:
                batch = batch + 1
            batchstring = '{0}'.format(batch)
            batchstring = batchstring.rjust(3, '0')
            if batches == 1:
                output = os.path.join(outputfolder, tilename).replace("\\", "/")
            else:
                output = os.path.join(
                    AtlassGen.makedir('{0}/Batch_{1}'.format(outputfolder, batchstring)),
                    tilename).replace("\\", "/")
            input = os.path.join(intputfolder, tilename).replace("\\", "/")
            if copy:
                tasks[tilename] = AtlassTask(tilename, copyfile, input, output)
            elif move:
                tasks[tilename] = AtlassTask(tilename, movefiles, input, output)
            else:
                print("no command selected")

        p = Pool(processes=cores)
        results = p.map(AtlassTaskRunner.taskmanager, tasks.values())

        success = 0
        for result in results:
            if not result.success:
                print('File {0} could Not be copied/moved'.format(result.name))
            else:
                success += 1
        print('Files copied/moved Successfully : {0}'.format(success))
def fulltextsearch(self):
    lang = locale_negotiator(self.request)

    try:
        language = self.languages[lang]
    except KeyError:
        return HTTPInternalServerError(
            detail="{0!s} not defined in languages".format(lang))

    if "query" not in self.request.params:
        return HTTPBadRequest(detail="no query")
    terms = self.request.params.get("query")

    maxlimit = self.settings.get("maxlimit", 200)

    try:
        limit = int(self.request.params.get(
            "limit", self.settings.get("defaultlimit", 30)))
    except ValueError:
        return HTTPBadRequest(detail="limit value is incorrect")
    if limit > maxlimit:
        limit = maxlimit

    try:
        partitionlimit = int(self.request.params.get("partitionlimit", 0))
    except ValueError:
        return HTTPBadRequest(detail="partitionlimit value is incorrect")
    if partitionlimit > maxlimit:
        partitionlimit = maxlimit

    terms_ts = "&".join(
        w + ":*" for w in IGNORED_CHARS_RE.sub(" ", terms).split(" ") if w != "")
    _filter = FullTextSearch.ts.op("@@")(func.to_tsquery(language, terms_ts))

    if self.request.user is None or self.request.user.role is None:
        _filter = and_(_filter, FullTextSearch.public.is_(True))
    else:
        _filter = and_(
            _filter,
            or_(FullTextSearch.public.is_(True),
                FullTextSearch.role_id.is_(None),
                FullTextSearch.role_id == self.request.user.role.id))

    if "interface" in self.request.params:
        _filter = and_(
            _filter,
            or_(FullTextSearch.interface_id.is_(None),
                FullTextSearch.interface_id == self._get_interface_id(
                    self.request.params["interface"])))
    else:
        _filter = and_(_filter, FullTextSearch.interface_id.is_(None))

    _filter = and_(
        _filter,
        or_(FullTextSearch.lang.is_(None), FullTextSearch.lang == lang))

    # The numbers used in ts_rank_cd() below indicate a normalization method.
    # Several normalization methods can be combined using |.
    # 2 divides the rank by the document length.
    # 8 divides the rank by the number of unique words in the document.
    # By combining them, shorter results seem to be preferred over longer ones
    # with the same ratio of matching words. But this relies only on testing it
    # and on some assumptions about how it might be calculated
    # (the normalization is applied two times with the combination of 2 and 8,
    # so the effect on at least the one-word results is therefore stronger).
    rank = func.ts_rank_cd(FullTextSearch.ts,
                           func.to_tsquery(language, terms_ts), 2 | 8)

    if partitionlimit:
        # Here we want to partition the search results based on
        # layer_name and limit each partition.
        row_number = func.row_number().over(
            partition_by=FullTextSearch.layer_name,
            order_by=(desc(rank), FullTextSearch.label)).label("row_number")
        subq = DBSession.query(FullTextSearch) \
            .add_columns(row_number).filter(_filter).subquery()
        query = DBSession.query(subq.c.id, subq.c.label, subq.c.params,
                                subq.c.layer_name, subq.c.the_geom,
                                subq.c.actions)
        query = query.filter(subq.c.row_number <= partitionlimit)
    else:
        query = DBSession.query(FullTextSearch).filter(_filter)
        query = query.order_by(desc(rank))
        query = query.order_by(FullTextSearch.label)

    query = query.limit(limit)
    objs = query.all()

    features = []
    for o in objs:
        properties = {
            "label": o.label,
        }
        if o.layer_name is not None:
            properties["layer_name"] = o.layer_name
        if o.params is not None:
            properties["params"] = o.params
        if o.actions is not None:
            properties["actions"] = o.actions
        if o.actions is None and o.layer_name is not None:
            properties["actions"] = [{
                "action": "add_layer",
                "data": o.layer_name,
            }]
        if o.the_geom is not None:
            geom = to_shape(o.the_geom)
            feature = Feature(id=o.id, geometry=geom,
                              properties=properties, bbox=geom.bounds)
            features.append(feature)
        else:
            feature = Feature(id=o.id, properties=properties)
            features.append(feature)

    # TODO: add callback function if provided in self.request, else return geojson
    return FeatureCollection(features)
def export_observations_web(info_role):
    """Optimized route for observations web export.

    .. :quickref: Synthese;

    This view is customisable by the administrator.
    Some columns are mandatory: id_synthese, geojson and geojson_local,
    to generate the exported files.

    POST parameters: Use a list of id_synthese (in POST parameters) to
    filter the v_synthese_for_export_view.

    :query str export_format: str<'csv', 'geojson', 'shapefiles'>
    """
    params = request.args
    # set default to csv
    export_format = "csv"
    export_view = GenericTableGeo(
        tableName="v_synthese_for_export",
        schemaName="gn_synthese",
        engine=DB.engine,
        geometry_field=None,
        srid=current_app.config["LOCAL_SRID"],
    )
    if "export_format" in params:
        export_format = params["export_format"]

    # get list of id synthese from POST
    id_list = request.get_json()

    db_cols_for_shape = []
    columns_to_serialize = []
    # loop over synthese config to get the columns for export
    for db_col in export_view.db_cols:
        if db_col.key in current_app.config["SYNTHESE"]["EXPORT_COLUMNS"]:
            db_cols_for_shape.append(db_col)
            columns_to_serialize.append(db_col.key)

    q = DB.session.query(export_view.tableDef).filter(
        export_view.tableDef.columns[
            current_app.config["SYNTHESE"]["EXPORT_ID_SYNTHESE_COL"]
        ].in_(id_list))

    # check R and E CRUVED to know if we filter with cruved
    cruved = cruved_scope_for_user_in_module(info_role.id_role,
                                             module_code="SYNTHESE")[0]
    if cruved["R"] > cruved["E"]:
        # filter on cruved specifying the columns
        # id_dataset, id_synthese, id_digitiser and observer in the v_synthese_for_export_view
        q = synthese_query.filter_query_with_cruved(
            export_view.tableDef,
            q,
            info_role,
            id_synthese_column=current_app.config["SYNTHESE"]["EXPORT_ID_SYNTHESE_COL"],
            id_dataset_column=current_app.config["SYNTHESE"]["EXPORT_ID_DATASET_COL"],
            observers_column=current_app.config["SYNTHESE"]["EXPORT_OBSERVERS_COL"],
            id_digitiser_column=current_app.config["SYNTHESE"]["EXPORT_ID_DIGITISER_COL"],
            with_generic_table=True,
        )
    results = q.limit(current_app.config["SYNTHESE"]["NB_MAX_OBS_EXPORT"])

    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)

    # columns = [db_col.key for db_col in export_view.db_cols]
    if export_format == "csv":
        formated_data = [
            export_view.as_dict(d, columns=columns_to_serialize) for d in results
        ]
        return to_csv_resp(file_name, formated_data, separator=";",
                           columns=columns_to_serialize)
    elif export_format == "geojson":
        features = []
        for r in results:
            geometry = ast.literal_eval(
                getattr(r, current_app.config["SYNTHESE"]["EXPORT_GEOJSON_4326_COL"]))
            feature = Feature(
                geometry=geometry,
                properties=export_view.as_dict(r, columns=columns_to_serialize),
            )
            features.append(feature)
        results = FeatureCollection(features)
        return to_json_resp(results, as_file=True, filename=file_name, indent=4)
    else:
        try:
            filemanager.delete_recursively(
                str(ROOT_DIR / "backend/static/shapefiles"),
                excluded_files=[".gitkeep"])
            dir_path = str(ROOT_DIR / "backend/static/shapefiles")
            export_view.as_shape(
                db_cols=db_cols_for_shape,
                data=results,
                geojson_col=current_app.config["SYNTHESE"]["EXPORT_GEOJSON_LOCAL_COL"],
                dir_path=dir_path,
                file_name=file_name,
            )
            return send_from_directory(dir_path, file_name + ".zip",
                                       as_attachment=True)
        except GeonatureApiError as e:
            message = str(e)
            return render_template(
                "error.html",
                error=message,
                redirect=current_app.config["URL_APPLICATION"] + "/#/synthese",
            )
def __init__(self):
    Module.__init__(self)
    Feature.__init__(self)
def segmentImage(input_parameters):
    """
    This function takes an input URL, seed point, and tolerance and produces
    a pointlist of the outermost contour
    """
    import cv2
    import numpy as np
    from numpy import unique, squeeze
    import Image
    import cStringIO
    import re
    import geojson
    from geojson import Polygon, Feature, FeatureCollection

    opdata = input_parameters
    print opdata
    imgstr = re.search(r'base64,(.*)', opdata['image']).group(1)
    tempimg = cStringIO.StringIO(imgstr.decode('base64'))
    tempimg.seek(0)
    cvimg = cv2.imdecode(np.asarray(bytearray(tempimg.read()), dtype=np.uint8), 1)
    # cv2.imwrite('inputimage.png', cvimg)
    # imgray = cv2.cvtColor(cvimg,cv2.COLOR_BGR2GRAY)
    imgray = cvimg[:, :, 2]
    # cv2.imwrite('segment.png', imgray)

    all_cnts = []
    cntdict = {}
    return_data = []

    extent = opdata['extent']
    tr = extent[0]
    bl = extent[1]
    native_width = tr[0] - bl[0]
    native_height = -bl[1] + tr[1]
    x_scale = native_width / imgray.shape[1]
    y_scale = native_height / imgray.shape[0]

    def contourToGeoString(cnt):
        '''convert an opencv contour to a geojson-compatible representation'''
        t_string = []
        for pt in cnt:
            px = np.round(pt[0] * x_scale) + bl[0]
            py = -1 * np.round(pt[1] * y_scale) + tr[1]
            t_string.append((float(px), float(py)))
        return t_string

    unique_labels = unique(imgray)
    print 'uniques %s' % (unique_labels)

    # we're going to make an assumption: only consider a single hole in a polygon
    for label in unique_labels:
        working_img = imgray.copy()
        working_img[working_img != label] = 0

        # CV_RETR_CCOMP retrieves all of the contours and organizes them into a
        # two-level hierarchy. At the top level, there are external boundaries
        # of the components. At the second level, there are boundaries of the
        # holes. If there is another contour inside a hole of a connected
        # component, it is still put at the top level.
        contours, hierarchy = cv2.findContours(working_img, cv2.RETR_CCOMP,
                                               cv2.CHAIN_APPROX_NONE)

        # hierarchy[i][0], hierarchy[i][1], hierarchy[i][2], and hierarchy[i][3]
        # are set to 0-based indices in contours of the next and previous
        # contours at the same hierarchical level, the first child contour and
        # the parent contour, respectively. If for the contour i there are no
        # next, previous, parent, or nested contours, the corresponding elements
        # of hierarchy[i] will be negative.
        for n, cnt in enumerate(contours):
            hei = hierarchy[0][n]
            # print hei
            # create an array for this polygon
            if str(label) not in cntdict.keys():
                cntdict[str(label)] = []

            if hei[3] >= 0:
                print '%s: %d -> this contour has a parent: %d' % (label, n, hei[3])
                # this contour has a parent, do not add it directly
                pass
            elif hei[2] < 0:
                # this contour has no children, just add it
                outer_poly = contourToGeoString(squeeze(cnt))
                # x_vals = np.round(ca[:,0] * x_scale) + bl[0]
                # y_vals = -1*np.round(ca[:,1] * y_scale) + tr[1]
                print '(add) %s: %d -> this contour (%d) has no children' % (
                    label, n, len(outer_poly))
                print outer_poly
                geo = Polygon([outer_poly])
                feat = Feature(geometry=geo, id=len(all_cnts))
                feat['properties']['labelindex'] = str(label)
                cntdict[str(label)].append(feat)
                all_cnts.append(feat)
            else:
                # contour's child is at contours[hei[2]];
                # add this contour and its child
                outer_poly = contourToGeoString(squeeze(cnt))
                inner_poly = contourToGeoString(squeeze(contours[hei[2]]))
                print '(add) %s: %d -> this contour (%d) has a child: %d (%d)' % (
                    label, n, len(outer_poly), hei[2], len(inner_poly))
                geo = Polygon([outer_poly, inner_poly])
                feat = Feature(geometry=geo, id=len(all_cnts))
                feat['properties']['labelindex'] = str(label)
                cntdict[str(label)].append(feat)
                all_cnts.append(feat)

    for c in all_cnts:
        return_data.append(geojson.dumps(c))
    print 'There are %d features to return' % (len(return_data))
    # msg['features'] =
    return (return_data)
def main():
    connection = ftp_connection(cfg.url, cfg.username, cfg.password)
    connection.cwd("FIRMS/c6/Global")
    root_path = os.path.dirname(os.path.abspath(__file__))
    date_list = get_last_n_dates(n=2)
    satellite_fname = "MODIS_C6_Global_MCD14DL_NRT_{0}.txt"
    outfile = open(cfg.outfile, mode="w")
    feature_collection = list()

    for day in date_list:
        julian_date = calendar2julian(day)
        satellite_file = satellite_fname.format(julian_date)
        if satellite_file not in connection.nlst():
            print "The file {0} does not exist!".format(satellite_file)
            continue
        downloaded_fname = os.path.join(root_path, "{0}.csv".format(julian_date))
        download_file(connection, satellite_file, downloaded_fname)
        downloaded_file = open(downloaded_fname, "r")
        reader = csv.DictReader(downloaded_file)
        for row in reader:
            # continue if the fire was recorded over 24 hours ago
            acquisition_date = datetime.strptime(
                "{0} {1}".format(row["acq_date"], row["acq_time"]),
                "%Y-%m-%d %H:%M")
            today = datetime.utcnow()
            record_age = (today - acquisition_date).total_seconds() / 3600.00
            if record_age > 24.00:
                continue
            else:
                row["record_age"] = int(record_age)
                row["acq_datetime"] = str(acquisition_date)
                point = Point((float(row["longitude"]), float(row["latitude"])))
                feature = Feature(geometry=point)
                for key in row:
                    if key not in ["longitude", "latitude", "acq_date", "acq_time"]:
                        feature.properties[key] = row[key]
                feature_collection.append(feature)
        downloaded_file.close()
        os.remove(downloaded_fname)

    geojson_content = FeatureCollection(feature_collection)
    reference_date = datetime(1970, 1, 1, 0, 0)
    geojson_content.features = sorted(
        geojson_content.features,
        key=lambda x: datetime.strptime(x["properties"]["acq_datetime"],
                                        "%Y-%m-%d %H:%M:%S"),
        reverse=True)
    json.dump(geojson_content, outfile, indent=2, separators=(",", ": "))
    outfile.close()
    connection.close()
def fillImageGeoJSON(params):
    # todo: implement a smart url-based hashing cache
    # load the image from url into memory, first as np array then opencv image
    req = urllib.urlopen(params['image']['url'])
    arr = np.asarray(bytearray(req.read()), dtype=np.uint8)
    img = cv2.imdecode(arr, -1)  # 'load it as it is'

    h, w = img.shape[:2]
    mask = np.zeros((h + 2, w + 2), np.uint8)
    lo = int(params['tolerance'])
    hi = int(params['tolerance'])
    connectivity = 4
    flags = connectivity
    flags |= cv2.FLOODFILL_FIXED_RANGE

    # print 'relative', params['click']['relative']
    # print 'absolute', params['click']['absolute']
    relclick = np.asarray(params['click']['relative'])
    absclick = np.asarray(params['click']['absolute'])
    regsize = np.asarray(params['image']['region']['size'])
    region_origin = np.asarray(params['image']['region']['origin'])
    regclick = absclick - region_origin
    reg_relclick = regclick / regsize
    real_size = np.asarray([w, h])
    region_real_click = real_size * reg_relclick
    # print real_size
    # print region_real_click
    seed_pt = (int(region_real_click[0]), int(region_real_click[1]))
    # seed_pt = (int(params['click']['relative'][0] * w), int(params['click']['relative'][1] * h))
    # this doesn't work when an edge is clipped
    cv2.floodFill(img, mask, seed_pt, (255, 190, 00),
                  (lo, lo, lo), (hi, hi, hi), flags)
    contours = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)

    # contours are now defined in the coordinates of the image captured;
    # to get their relative position in the subimage:
    subimage_x_to_rel_subimage_x = 1. / w
    subimage_y_to_rel_subimage_y = 1. / h

    # since we know the transform of the subimage coordinate frame to the
    # native coordinate frame
    js_region_width = float(params['image']['region']['size'][0])      # width in native coordinates
    js_region_height = float(params['image']['region']['size'][1])     # height in native coordinates
    js_region_origin_x = float(params['image']['region']['origin'][0]) # offset in native coordinates
    js_region_origin_y = float(params['image']['region']['origin'][1]) # offset in native coordinates

    def contourToGeoString(cnt):
        '''convert an opencv contour to a geojson-compatible representation'''
        t_string = []
        for pt in cnt:
            rx = subimage_x_to_rel_subimage_x * pt[0]
            ry = subimage_y_to_rel_subimage_y * pt[1]
            new_x = (js_region_width * rx) + js_region_origin_x - 3
            new_y = -1 * ((js_region_height * ry) + js_region_origin_y - 3)
            # px = np.round(pt[0] * x_scale) + bl[0]
            # py = -1*np.round(pt[1] * y_scale) + tr[1]
            t_string.append((float(new_x), float(new_y)))
        return t_string

    outer_poly = contourToGeoString(squeeze(contours[0][0]))
    geo = Polygon([outer_poly])
    feat = Feature(geometry=geo)
    feat['properties']['rgbcolor'] = '''rgba(255, 255, 255, 0.1)'''
    feat['properties']['hexcolor'] = '''#ff0000'''
    feat['properties']['source'] = 'autofill'
    del img, mask

    return_msg = {}
    return_msg['features'] = [geojson.dumps(feat)]
    return return_msg
print("Invalid number of arguments used!") print("Usage: objtogeojson.py <input OBJ file> <output GeoJSON file>") sys.exit() input_file = sys.argv[1] output_file = sys.argv[2] vertices = [] features = [] for line in open(input_file, "r").readlines(): split_line = line.strip().split(" ") identifier = split_line[0] data = split_line[1:] if identifier == "v": vertex = [float(val) for val in data] vertices.append(vertex) features.append(Feature(geometry=Point(tuple(vertex)))) if identifier == "f": line = tuple( [vertices[index - 1] for index in [int(val) for val in data]]) features.append(Feature(geometry=LineString(line))) feature_collection = FeatureCollection(features) with open(output_file, 'w') as f: dump(feature_collection, f)
def get(self):
    raw_sources = request.args.get('sources', '', str)
    sources: List[str] = raw_sources.split(',')
    lat: Optional[float] = request.args.get('lat', type=float)
    lon: Optional[float] = request.args.get('lon', type=float)
    radius: Optional[float] = request.args.get('radius', type=float)
    if not lat or not lon or not radius:
        return {
            'code': http.HTTPStatus.BAD_REQUEST,
            'message': http.HTTPStatus.BAD_REQUEST.phrase,
            'error': 'Data lat, lon, radius are required.'
        }

    raw_bbox = request.args.get('bbox', '', str)
    try:
        # parse the bbox parameter (the original parsed raw_sources here)
        bbox: List[float] = list(map(float, raw_bbox.split(',')))
    except (TypeError, ValueError):
        # Case of None
        bbox = []

    building: Optional[str] = request.args.get('building', type=str)
    if not building:
        return {
            'code': http.HTTPStatus.BAD_REQUEST,
            'message': http.HTTPStatus.BAD_REQUEST.phrase,
            'error': 'Data building is required.'
        }

    raw_asset_tracking_ids = request.args.get('assetTrackingIds', '', str)
    asset_tracking_ids: List[str] = raw_asset_tracking_ids.split(',')
    if not asset_tracking_ids[0]:
        # the value of [0] will be an empty string if there is nothing.
        return {
            'code': http.HTTPStatus.BAD_REQUEST,
            'message': http.HTTPStatus.BAD_REQUEST.phrase,
            'error': 'Data assetTrackingIds is required.'
        }

    raw_tags_any = request.args.get('tags_any', '', str)
    tags_any: List[str] = raw_tags_any.split(',')
    raw_tags_all = request.args.get('tags_all', '', str)
    tags_all: List[str] = raw_tags_all.split(',')
    operator: Optional[str] = request.args.get('operator', type=str)
    advertiser: Optional[str] = request.args.get('advertiser', type=str)
    limit: Optional[int] = request.args.get('limit', type=int)

    point = Point((lon, lat))
    properties = {
        'radius': radius,
        'assetTrackingId': asset_tracking_ids[0],
        'buildingRef': building
    }
    feature = Feature(id="1", geometry=point, properties=properties)
    features = [feature]
    feature_collection = FeatureCollection(features=features)
    return {
        'code': http.HTTPStatus.OK,
        'message': http.HTTPStatus.OK.phrase,
        'data': geojson.dumps(feature_collection)
    }
def main():
    """
    Read files, and extract the ground surface.
    Store this as a new GeoJSON file.
    """
    # All files to convert with their destination file
    source_files = ["../Data/Astoria/OCM/Oregon-41007-000.json",
                    "../Data/Seattle/OCM/Washington-53033-004.json",
                    "../Data/Seattle/OCM/Washington-53033-016.json",
                    "../Data/Portland/OCM/Oregon-41051-000.json",
                    "../Data/Portland/OCM/Oregon-41051-001.json",
                    "../Data/Portland/OCM/Oregon-41051-002.json",
                    "../Data/Portland/OCM/Oregon-41051-003.json",
                    "../Data/Portland/OCM/Oregon-41051-004.json",
                    "../Data/Portland/OCM/Oregon-41051-005.json",
                    "../Data/Portland/OCM/Oregon-41051-006.json",
                    "../Data/Portland/OCM/Oregon-41051-007.json",
                    "../Data/SanDiego/OCM/California-06073-002.json",
                    "../Data/SanDiego/OCM/California-06073-003.json",
                    "../Data/SanDiego/OCM/California-06073-004.json",
                    "../Data/SanDiego/OCM/California-06073-012.json"]
    dest_files = ["../Data/Astoria/OCM/2D/Oregon-41007-000_2D.geojson",
                  "../Data/Seattle/OCM/2D/Washington-53033-004_2D.geojson",
                  "../Data/Seattle/OCM/2D/Washington-53033-016_2D.geojson",
                  "../Data/Portland/OCM/2D/Oregon-41051-000_2D.geojson",
                  "../Data/Portland/OCM/2D/Oregon-41051-001_2D.geojson",
                  "../Data/Portland/OCM/2D/Oregon-41051-002_2D.geojson",
                  "../Data/Portland/OCM/2D/Oregon-41051-003_2D.geojson",
                  "../Data/Portland/OCM/2D/Oregon-41051-004_2D.geojson",
                  "../Data/Portland/OCM/2D/Oregon-41051-005_2D.geojson",
                  "../Data/Portland/OCM/2D/Oregon-41051-006_2D.geojson",
                  "../Data/Portland/OCM/2D/Oregon-41051-007_2D.geojson",
                  "../Data/SanDiego/OCM/2D/California-06073-002_2D.geojson",
                  "../Data/SanDiego/OCM/2D/California-06073-003_2D.geojson",
                  "../Data/SanDiego/OCM/2D/California-06073-004_2D.geojson",
                  "../Data/SanDiego/OCM/2D/California-06073-012_2D.geojson"]

    for i, fname in enumerate(source_files):
        print(fname)
        with open(fname) as filepointer:
            data = json.load(filepointer)

        # Extract cityobjects and vertices list
        cityobjects = data['CityObjects']
        vertices = np.array(data['vertices'])

        features = []
        for obj_id in cityobjects:
            # Extract the list with indices of the vertices
            coord_idxs = cityobjects[obj_id]['geometry'][0]['boundaries']
            attributes = cityobjects[obj_id]['attributes']
            attributes['id'] = obj_id

            # Go over all these index sets and find the one where the
            # z-value is all zero -> ground surface
            for idx_set in coord_idxs[0]:
                coordinates = vertices[idx_set[0]]
                zeros = np.count_nonzero(coordinates[:, 2])
                if zeros == 0:
                    coords_2D = np.delete(coordinates, np.s_[2], axis=1)
                    footprint = Polygon(coords_2D)
                    break

            # Check for invalid polygons, fix them if invalid
            if not footprint.is_valid:
                print("Fixing invalid polygon. ID:", obj_id)
                footprint = footprint.buffer(0)

            # Create the geojson feature based on the geometry and attributes
            geojson_feature = Feature(geometry=footprint, properties=attributes)
            # Check if the features that we store are actually valid
            if not geojson_feature.is_valid:
                print("Invalid Feature. ID:", obj_id)
            features.append(geojson_feature)

        # Put all features in the geojson feature collection
        feature_collection = FeatureCollection(features)

        # Write the 2D footprints with their attributes to a new file
        with open(dest_files[i], 'w') as filepointer:
            dump(feature_collection, filepointer)
def make_labels(dest_folder, zoom, country, classes, ml_type, bounding_box, **kwargs):
    """Create label data from OSM QA tiles for specified classes

    Perform the following operations:
    - If necessary, re-tile OSM QA Tiles to the specified zoom level
    - Iterate over all tiles within the bounding box and produce a label for each
    - Save the label file as labels.npz
    - Create an output for previewing the labels (GeoJSON or PNG depending upon ml_type)

    Parameters
    ------------
    dest_folder: str
        Folder to save labels and example tiles into
    zoom: int
        The zoom level to create tiles at
    classes: list
        A list of classes for machine learning training. Each class is defined
        as a dict with two required properties:
          - name: class name
          - filter: A Mapbox GL Filter. See the README for more details
    imagery: str
        Imagery template to download satellite images from.
        Ex: http://a.tiles.mapbox.com/v4/mapbox.satellite/{z}/{x}/{y}.jpg?access_token=ACCESS_TOKEN
    ml_type: str
        Defines the type of machine learning. One of "classification",
        "object-detection", or "segmentation"
    bounding_box: list
        The bounding box to create images from. This should be given in the
        form: `[xmin, ymin, xmax, ymax]` as longitude and latitude values
        between `[-180, 180]` and `[-90, 90]` respectively
    **kwargs: dict
        Other properties from CLI config passed as keywords to other utility
        functions
    """
    mbtiles_file = op.join(dest_folder, '{}.mbtiles'.format(country))
    mbtiles_file_zoomed = op.join(dest_folder, '{}-z{!s}.mbtiles'.format(country, zoom))

    if not op.exists(mbtiles_file_zoomed):
        print('Retiling QA Tiles to zoom level {} (takes a bit)'.format(zoom))
        filtered_geo = op.join(dest_folder, '{}.geojson'.format(country))
        ps = Popen(['tippecanoe-decode', '-c', '-f', mbtiles_file], stdout=PIPE)
        stream_filter_fpath = op.join(op.dirname(label_maker.__file__),
                                      'stream_filter.py')
        run(['python', stream_filter_fpath, json.dumps(bounding_box)],
            stdin=ps.stdout, stdout=open(filtered_geo, 'w'))
        ps.wait()
        run(['tippecanoe', '--no-feature-limit', '--no-tile-size-limit',
             '-P', '-l', 'osm', '-f', '-z', str(zoom), '-Z', str(zoom),
             '-o', mbtiles_file_zoomed, filtered_geo])

    # Call tilereduce
    print('Determining labels for each tile')
    mbtiles_to_reduce = mbtiles_file_zoomed
    tilereduce(dict(zoom=zoom, source=mbtiles_to_reduce, bbox=bounding_box,
                    args=dict(ml_type=ml_type, classes=classes)),
               _mapper, _callback, _done)

    # Add empty labels to any tiles which didn't have data
    empty_label = _create_empty_label(ml_type, classes)
    for tile in tiles(*bounding_box, [zoom]):
        index = '-'.join([str(i) for i in tile])
        if tile_results.get(index) is None:
            tile_results[index] = empty_label

    # Print a summary of the labels
    _tile_results_summary(ml_type, classes)

    # write out labels as numpy arrays
    labels_file = op.join(dest_folder, 'labels.npz')
    print('Write out labels to {}'.format(labels_file))
    np.savez(labels_file, **tile_results)

    # write out labels as GeoJSON or PNG
    if ml_type == 'classification':
        features = []
        for tile, label in tile_results.items():
            feat = feature(Tile(*[int(t) for t in tile.split('-')]))
            features.append(Feature(geometry=feat['geometry'],
                                    properties=dict(label=label.tolist())))
        json.dump(fc(features),
                  open(op.join(dest_folder, 'classification.geojson'), 'w'))
    elif ml_type == 'object-detection':
        label_folder = op.join(dest_folder, 'labels')
        if not op.isdir(label_folder):
            makedirs(label_folder)
        for tile, label in tile_results.items():
            # if we have at least one bounding box label
            if bool(label.shape[0]):
                label_file = '{}.png'.format(tile)
                img = Image.new('RGB', (256, 256))
                draw = ImageDraw.Draw(img)
                for box in label:
                    draw.rectangle(((box[0], box[1]), (box[2], box[3])),
                                   outline='red')
                print('Writing {}'.format(label_file))
                img.save(op.join(label_folder, label_file))
    elif ml_type == 'segmentation':
        label_folder = op.join(dest_folder, 'labels')
        if not op.isdir(label_folder):
            makedirs(label_folder)
        for tile, label in tile_results.items():
            # if we have any class pixels
            if np.sum(label):
                label_file = '{}.png'.format(tile)
                img = Image.fromarray(label * 255)
                print('Writing {}'.format(label_file))
                img.save(op.join(label_folder, label_file))
def searchPoi(lang_code, search_text, mode):
    poi_list = []
    pois = Poi.objects.filter(
        Q(name__icontains=search_text) |
        Q(poi_tags__icontains=search_text) |
        Q(category__cat_name__icontains=search_text)).filter(enabled=True)
    if lang_code == "de":
        pois = Poi.objects.filter(
            Q(name_de__icontains=search_text) |
            Q(poi_tags__icontains=search_text) |
            Q(category__cat_name_de__icontains=search_text)).filter(enabled=True)

    build_name = ""
    icon_path = ""
    if pois:
        for poi in pois:
            if hasattr(poi.fk_building, 'building_name'):
                build_name = poi.fk_building.building_name
            if hasattr(poi.category.fk_poi_icon, 'poi_icon'):
                icon_path = str(poi.category.fk_poi_icon.poi_icon)
            center_geom = json.loads(poi.geom.geojson)
            if lang_code == "de":
                poi_data = {
                    "label": poi.name_de,
                    "name": poi.name_de,
                    "name_de": poi.name_de,
                    "type": "",
                    "external_id": "",
                    "centerGeometry": center_geom,
                    "floor_num": poi.floor_num,
                    "floor_name": poi.floor_name,
                    "building": build_name,
                    "aks_nummer": "",
                    "roomcode": "",
                    "parent": poi.category.cat_name_de,
                    "category": {'id': poi.category_id,
                                 'cat_name': poi.category.cat_name_de},
                    "icon": icon_path,
                    "poi_link_unique": "/?poi-id=" + str(poi.id) + "&floor=" + str(poi.floor_num),
                    "poi_link_category": "/?poi-cat-id=" + str(poi.category_id),
                    "src": "poi db",
                    "poi_id": poi.id
                }
                if mode == "search":
                    new_feature_geojson = Feature(geometry=center_geom,
                                                  properties=poi_data)
                    poi_list.append(new_feature_geojson)
                elif mode == "autocomplete":
                    poi_list.append(poi_data)
            else:
                poi_data = {
                    "label": poi.name,
                    "name": poi.name,
                    "name_de": poi.name_de,
                    "type": "",
                    "external_id": "",
                    "centerGeometry": center_geom,
                    "floor_num": poi.floor_num,
                    "building": build_name,
                    "aks_nummer": "",
                    "roomcode": "",
                    "parent": poi.category.cat_name,
                    "category": {'id': poi.category_id,
                                 'cat_name': poi.category.cat_name_en},
                    "poi_link_unique": "/?poi-id=" + str(poi.id) + "&floor=" + str(poi.floor_num),
                    "poi_link_category": "/?poi-cat-id=" + str(poi.category_id),
                    "icon": icon_path,
                    "src": "poi db",
                    "poi_id": poi.id
                }
                if mode == "search":
                    new_feature_geojson = Feature(geometry=center_geom,
                                                  properties=poi_data)
                    poi_list.append(new_feature_geojson)
                elif mode == "autocomplete":
                    poi_list.append(poi_data)

    spaces_list = [{
        "name": _(space.room_code),
        "name_" + lang_code: _(space.room_code),
        "id": space.id,
        "space_id": space.id
    } for space in BuildingFloorSpace.objects.filter(
        room_code__isnull=False).filter(room_code__icontains=search_text)]

    if poi_list:
        final_geojs_res = FeatureCollection(features=poi_list)
    else:
        final_geojs_res = False

    if mode == "search":
        if final_geojs_res:
            return final_geojs_res
        else:
            return False
    else:
        if poi_list:
            return poi_list
        else:
            return False
import csv
import json

import geojson
from geojson import Feature, Point, FeatureCollection

with open('ChildCareCentersGPS.csv', 'r') as csvfile:
    reader = csv.reader(csvfile)
    centers_list = list(reader)

num_rows = len(centers_list)
features = []
# skip the header row
for x in range(1, num_rows):
    current_entry = centers_list[x]
    current_feature = Feature(
        geometry=Point((float(current_entry[20]), float(current_entry[21]))),
        properties={"Operation #": str(current_entry[0]),
                    "Agency Number": str(current_entry[1]),
                    "Operation/Caregiver Name": str(current_entry[2]),
                    "Address": str(current_entry[3]),
                    "City": str(current_entry[4]),
                    "State": str(current_entry[5]),
                    "Zip": str(current_entry[6]),
                    "County": str(current_entry[7]),
                    "Phone": str(current_entry[8]),
                    "Type": str(current_entry[9]),
                    "Status": str(current_entry[10]),
                    "Issue Date": str(current_entry[11]),
                    "Capacity": str(current_entry[12]),
                    "Email Address": str(current_entry[13]),
                    "Facility ID": str(current_entry[14]),
                    "Monitoring Frequency": str(current_entry[15]),
                    "Infant": str(current_entry[16]),
                    "Toddler": str(current_entry[17]),
                    "Preschool": str(current_entry[18]),
                    "School": str(current_entry[19])})
    features.append(current_feature)

data = json.loads(geojson.dumps(FeatureCollection(features)))
with open('data.json', 'w') as outfile:
    json.dump(data, outfile, sort_keys=False, indent=4,
              ensure_ascii=False, separators=(',', ':'))
#print geojson.dumps(FeatureCollection(features))
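# An alternative sketch using csv.DictReader instead of positional indexing,
# assuming the CSV has a header row; the "Longitude"/"Latitude" column names
# standing in for columns 20/21 are hypothetical.
import csv
from geojson import Feature, Point, FeatureCollection, dumps

with open('ChildCareCentersGPS.csv') as csvfile:
    features = [
        Feature(geometry=Point((float(row['Longitude']), float(row['Latitude']))),
                properties=row)  # keep every column as a property
        for row in csv.DictReader(csvfile)]
print(dumps(FeatureCollection(features)))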
p['variables'].append({
    'labels': [
        'part 1A',
        'part 1B',
    ],
    'name': 'Aspect 1',
    'short': [
        'p1A',
        'p1B',
    ],
    'type': 'categories',
    'values': [X[0][0], X[0][1]]
})
p['variables'].append({
    'labels': [
        'part 2A',
        'part 2B',
    ],
    'name': 'Aspect 2',
    'short': [
        'p2A',
        'p2B',
    ],
    'type': 'categories',
    'values': [X[1][0], X[1][1]]
})
gj['features'].append(Feature(geometry=g, properties=p))

with open(outFolder + '/{0:04d}_n.gj'.format(y), 'w') as fout:
    dump(gj, fout, sort_keys=True, indent=4, separators=(',', ': '))
def project_edit(request):
    id = request.matchdict['project']
    project = DBSession.query(Project).get(id)
    licenses = DBSession.query(License).all()
    if 'form.submitted' in request.params:
        for locale, translation in project.translations.iteritems():
            with project.force_locale(locale):
                for field in ['name', 'short_description', 'description',
                              'instructions', 'per_task_instructions']:
                    translated = '_'.join([field, locale])
                    if translated in request.params:
                        setattr(project, field, request.params[translated])
        DBSession.add(project)
        for p in ['changeset_comment', 'entities_to_map', 'imagery']:
            if p in request.params:
                setattr(project, p, request.params[p])
        if 'license_id' in request.params and \
                request.params['license_id'] != "":
            license_id = int(request.params['license_id'])
            license = DBSession.query(License).get(license_id)
            project.license = license
        if 'private' in request.params and \
                request.params['private'] == 'on':
            project.private = True
        else:
            project.private = False
        project.status = request.params['status']
        project.priority = request.params['priority']
        if request.params.get('due_date', '') != '':
            due_date = request.params.get('due_date')
            due_date = datetime.datetime.strptime(due_date, "%m/%d/%Y")
            project.due_date = due_date
        else:
            project.due_date = None
        if 'josm_preset' in request.params:
            josm_preset = request.params.get('josm_preset')
            if hasattr(josm_preset, 'value'):
                project.josm_preset = josm_preset.value.decode('UTF-8')
        # Remove the previously set priority areas
        for area in project.priority_areas:
            DBSession.delete(area)
        project.priority_areas[:] = []
        DBSession.flush()
        priority_areas = request.params.get('priority_areas', '')
        if priority_areas != '':
            geoms = parse_geojson(priority_areas)
            for geom in geoms:
                geom = 'SRID=4326;%s' % geom.wkt
                project.priority_areas.append(PriorityArea(geom))
        DBSession.add(project)
        return HTTPFound(
            location=route_path('project', request, project=project.id))

    translations = project.translations.items()
    features = []
    for area in project.priority_areas:
        features.append(Feature(geometry=shape.to_shape(area.geometry)))
    return dict(page_id='project_edit',
                project=project,
                licenses=licenses,
                translations=translations,
                priority_areas=FeatureCollection(features))
geo_ = ','.join([str(elem) for elem in geo])
geo_string.append(geo_)
list_geometry = ';'.join([str(elem) for elem in geo_string])
url = ('http://ivolab:5000/match/v1/driving/' + list_geometry +
       '?steps=false&geometries=geojson&overview=full&annotations=false&tidy=true')
response = requests.get(url)

# for i in range(len(geometry=response.json()['matchings'])):
#     geometry = [response.json['matchings'][i]['geometry']['coordinates']
#                 for i in range(len(response.json['matchings'][i]))]

# parse the response once instead of re-calling response.json() in each loop
matchings = response.json()['matchings']

geooo = []
for i in range(len(matchings)):
    geooo.append(matchings[i]['geometry']['coordinates'])
# flatten the per-matching coordinate lists into a single LineString
geometry = LineString([y for x in geooo for y in x])

# distance is assumed to be initialised earlier in the omitted part of this script
for i in range(len(matchings)):
    for j in range(len(matchings[i]['legs'])):
        distance += matchings[i]['legs'][j]['distance']

prop = {"country": "Singapore"}
geometryJSON = Feature(geometry=geometry, properties=prop)
# geometryJSON = FeatureCollection([geometryJSON], crs=crs)
with open("mapmatched/test6.geojson", "w") as file:
    dump(geometryJSON, file)
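# A sketch of the same OSRM match request using requests' params argument
# instead of hand-building the query string; the host and port come from the
# snippet above, while the coordinate pairs here are hypothetical.
import requests

list_geometry = "103.85,1.29;103.86,1.30"  # semicolon-separated lon,lat pairs
response = requests.get(
    "http://ivolab:5000/match/v1/driving/" + list_geometry,
    params={"steps": "false", "geometries": "geojson",
            "overview": "full", "annotations": "false", "tidy": "true"})
matchings = response.json()["matchings"]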
def addProcess(response, *args, **kwargs):
    global intab
    global outtab
    global trantab
    global exportFeatures
    global oglasData

    reqUrl = response.url
    addProperties = {
        "title": oglasData[reqUrl]['addTitle'],
        "url": reqUrl
    }
    try:
        # open details page
        addDetailsTree = html.fromstring(response.content)

        # extract the coordinates from the Google Maps link, if present
        gMapsUrl = addDetailsTree.xpath('//*[@id="base-entity-map-tab"]/div/a/@href')
        if gMapsUrl:
            o = urlparse(gMapsUrl[0])
            qs = parse_qs(o.query)
            coords = qs['q'][0].split(',')
        else:
            gMapsUrl = [None]
            coords = [None]

        addProperties['mapUrl'] = gMapsUrl[0]
        addProperties['cijenaHRK'] = addDetailsTree.xpath('//strong[@class="price price--hrk"]/text()')[0].strip()
        addProperties['cijenaEUR'] = addDetailsTree.xpath('//strong[@class="price price--eur"]/text()')[0].strip()
        addProperties['objavljeno'] = addDetailsTree.xpath('//time/@datetime')[0].strip()
        addProperties['prikazano_puta'] = re.search(
            '"displayCountText":(.*?),"displayExpiresOnText',
            addDetailsTree.xpath('/html/head/script')[0].text
        ).group(1)

        # house attributes ("Podaci o kuci")
        addAttr = addDetailsTree.xpath('//*[@id="base-entity-data-tab"]/div/div[2]/div[1]/table/tbody/tr')
        for attr in addAttr:
            colName = attr.xpath('th/text()')[0].strip(':')
            if isinstance(colName, str):
                colName = unicode(colName, 'utf-8')
            colnameNew = colName.translate(trantab)
            attrVal = attr.xpath('td/time/text()')
            if attrVal == []:
                attrVal = attr.xpath('td/text()')[0]
            else:
                attrVal = attr.xpath('td/time/text()')[0]
            addProperties[colnameNew] = attrVal

        # extras ("dodaci")
        addDod = addDetailsTree.xpath('//*[@id="base-entity-data-tab"]/div/div[2]/div[2]/ul/li/text()')
        for colName in addDod:
            if isinstance(colName, str):
                colName = unicode(colName, 'utf-8')
            colnameNew = colName.translate(trantab)
            addProperties[colnameNew] = True

        my_feature = Feature(
            # Google Maps URLs carry "lat,lng", so index 1 is the longitude
            geometry=(Point((float(coords[1]), float(coords[0])))
                      if gMapsUrl != [None] else None),
            id=int(oglasData[reqUrl]['dataAddId']),
            properties=addProperties
        )
        exportFeatures.append(my_feature)
        response.close()
    except:
        # record the listing with an error flag and no geometry
        print 'error %s' % reqUrl
        addProperties['error'] = True
        my_feature = Feature(
            geometry=None,
            id=int(oglasData[reqUrl]['dataAddId']),
            properties=addProperties
        )
        exportFeatures.append(my_feature)
        response.close()
def multimodal_directions(origin, destination, modes, API_KEY):
    # Store GeoJSON features in a list
    results = []
    # Store durations and start / stop times
    durations = []
    starttimes = []
    endtimes = []

    for mode in modes:
        # Get data from Google Maps Directions API
        data = gmaps_directions(origin, destination, mode, API_KEY)

        # Check to see if no routes were returned
        if len(data['routes']) == 0:
            sys.exit("Sorry, directions are not available for {} from {} to {}".format(
                mode, origin, destination))

        # Get duration in seconds
        if 'duration_in_traffic' in data['routes'][0]['legs'][0]:
            duration = data['routes'][0]['legs'][0]['duration_in_traffic']['value']
        else:
            duration = data['routes'][0]['legs'][0]['duration']['value']

        # Calculate arrival time (departure_time is expected to be a
        # module-level datetime set before this function is called)
        arrival_time = departure_time + timedelta(0, duration)

        # Get and decode the overview polyline
        polyline = data['routes'][0]['overview_polyline']['points']
        decoded_polyline = decode_polyline(polyline)

        # Create LineString
        linestring = LineString(decoded_polyline)

        # Create GeoJSON properties
        properties = {
            'mode': mode,
            'duration': duration,
            'start': departure_time.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3],
            'end': arrival_time.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]
        }

        # Create GeoJSON feature and store it in the results list
        feature = Feature(geometry=linestring, properties=properties)
        results.append(feature)

        # Store duration and start/stop times in lists
        durations.append(duration)
        starttimes.append(departure_time)
        endtimes.append(arrival_time)

    # Convert list of features to GeoJSON FeatureCollection
    feature_collection = FeatureCollection(results)
    return feature_collection, durations, starttimes, endtimes
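# A minimal usage sketch for multimodal_directions, assuming gmaps_directions,
# decode_polyline, and the module-level departure_time are defined as above;
# the origin, destination, and API key are hypothetical placeholders.
from datetime import datetime

departure_time = datetime.now()
fc, durations, starts, ends = multimodal_directions(
    "Union Square, San Francisco, CA",
    "Ferry Building, San Francisco, CA",
    ["driving", "walking", "transit"],
    "YOUR_GOOGLE_MAPS_API_KEY")
# one LineString feature per mode, with matching duration entries
print([f["properties"]["mode"] for f in fc["features"]], durations)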
datetime_detected = parser.parse(str(detection[5]) + str(detection[6]))
detected = datetime_detected - timedelta(hours=8)
detected = detected.strftime("%b %d %Y %H:%M:%S")
modisIncident = {'longitude': px, 'latitude': py, 'detected': detected}

wkt = "POINT (%f %f)" % (px, py)
ogrPoint = ogr.CreateGeometryFromWkt(wkt)
ogrPoint.Transform(coordTrans)
wkt = ogrPoint.ExportToWkt()
modisPoint = Point((ogrPoint.GetX(), ogrPoint.GetY()))  # GeoJSON point
modisObject = Feature(geometry=modisPoint, properties=modisIncident)

# bin detections by age: last 24 hours, last 48 hours, or older (last week)
if datetime_detected > datetime.utcnow() - timedelta(hours=24):
    modisObject.properties['group'] = 24
    modis24.append(modisObject)
elif datetime_detected > datetime.utcnow() - timedelta(hours=48):
    modisObject.properties['group'] = 48
    modis48.append(modisObject)
else:
    modisObject.properties['group'] = 7
    modis7.append(modisObject)
#modisList.append(modisObject)

# note: the CRS link is appended to the feature list as a raw JSON fragment
proj = '"crs": {"type": "link", "properties": {"href": "http://spatialreference.org/ref/sr-org/7483/ogcwkt/", "type": "ogcwkt"}}'
modis24.append(proj)
for line in jsondata:
    if not line.get('country'):
        continue
    country = line['country']
    # don't need the country in the dict
    del line['country']
    #if country in data:
    #    data[country].append(line)
    #else:
    #    data[country] = [ line ]
    lat = float(line['lat'])
    lng = float(line['lng'])
    feature = Feature(geometry=Point((lng, lat)))
    del line['lat']
    del line['lng']
    feature.properties = line
    features.append(feature)
#except Exception as e:
#    print str(e)

with open('../data/bordercrossings/bordercrossings.geojson', 'w') as outfile:
    json.dump(FeatureCollection(features), outfile)
def data_fetch():
    results = query_resulted_in_csv(server, query_geom_symbol)
    feature_symboliser = {}
    symbolisers_geoms = {}

    # parse the query results
    for row in results:
        symboliser_info = ()
        feature_symboliser[str(row.symboliser)] = ()
        query = query_symboliser.substitute(symboliser=str(row.symboliser))
        results_symbolisers = query_resulted_in_csv(server, query)
        for row1 in results_symbolisers:
            symboliser_info = (float(str(row1.h)), float(str(row1.s)), float(str(row1.v)),
                               str(row1.stroke_width), str(row1.size))
        if symboliser_info not in symbolisers_geoms.keys():
            symbolisers_geoms[symboliser_info] = {}
        symbolisers_geoms[symboliser_info][str(row.feature)] = str(row.featureWKT)

    symbolisers_dict = {}
    featurecollection_list = []
    for key in symbolisers_geoms.keys():
        symboliser_type = ''

        # map symboliser info to a JS Leaflet style
        rgb_decimal = matplotlib.colors.hsv_to_rgb([key[0] / 360, key[1], key[2]])
        color_hex = matplotlib.colors.to_hex(rgb_decimal)
        if key[3] != 'None':
            symboliser_type = 'https://www.gis.lu.se/ont/data_portrayal/symboliser#LineSymboliser'
            # copy the default style so the shared default dict is not mutated
            new_key = dict(default_line_style)
            new_key['color'] = color_hex
            new_key['weight'] = key[3]
        if key[4] != 'None':
            symboliser_type = 'https://www.gis.lu.se/ont/data_portrayal/symboliser#PointSymboliser'
            new_key = dict(default_point_style)
            new_key['fillColor'] = color_hex
            new_key['radius'] = int(key[4])
        symbolisers_dict[key] = new_key

        # GeoJSON conversion
        feature_list = []
        for feature_key in symbolisers_geoms[key].keys():
            geom = ogr.CreateGeometryFromWkt(symbolisers_geoms[key][feature_key])
            geom_geojson = loads(geom.ExportToJson())
            feature_geojson = Feature(geometry=geom_geojson,
                                      properties={"URI": feature_key})
            feature_list.append(feature_geojson)
        feature_collection = FeatureCollection(feature_list)
        feature_collection['style'] = symbolisers_dict[key]
        feature_collection['symboliser_type'] = symboliser_type
        featurecollection_list.append(geojson.dumps(feature_collection))

    return featurecollection_list
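# A small sketch of the HSV-to-hex mapping used above: matplotlib expects
# h, s, v each in [0, 1], hence the division of the hue by 360. The sample
# hue/saturation/value here are arbitrary.
import matplotlib.colors

hsv = (210 / 360, 0.8, 0.9)               # hue in degrees, normalised
rgb = matplotlib.colors.hsv_to_rgb(hsv)   # -> r, g, b floats in [0, 1]
print(matplotlib.colors.to_hex(rgb))      # '#2e8ae6'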
points.append(p)
spots.append(p.buffer(buffer_distance))

for feature in js['features']:
    if feature['geometry'] and feature['geometry']['type'] == 'LineString':
        l = LineString(feature['geometry']['coordinates'])
        lines.append(l)
        spots.append(l.buffer(buffer_distance))

patches = cascaded_union(spots)
# see also: object.symmetric_difference(other)

import copy
# note: the union above is immediately discarded; each buffer is kept as its own patch
patches = []
for spot1 in spots:
    sfinal = copy.deepcopy(spot1)
    # for spot2 in spots:
    #     if spot1 != spot2:
    #         sfinal = spot1.difference(spot2)
    patches.append(sfinal)

features = []
for p in patches:
    f = Feature(geometry=p)
    f.pop('id', None)
    features.append(f)

fc = FeatureCollection(features)
print(geojson.dumps(fc))
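# A minimal sketch of actually dissolving the buffered spots into merged
# patches instead of keeping each buffer separate; unary_union is the modern
# Shapely replacement for cascaded_union, and mapping() converts a shapely
# geometry into a GeoJSON-style dict. The two sample points are hypothetical.
from shapely.geometry import Point as ShpPoint, mapping
from shapely.ops import unary_union
from geojson import Feature

spots = [ShpPoint(0, 0).buffer(1.0), ShpPoint(1.5, 0).buffer(1.0)]  # overlapping buffers
merged = unary_union(spots)            # dissolves overlaps into one (Multi)Polygon
feature = Feature(geometry=mapping(merged))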
def get_labs(format):
    """Gets Hackerspaces data from hackaday.io."""
    hackerspaces_json = data_from_hackaday_io(hackaday_io_labs_map_url)
    hackerspaces = {}

    # Load all the Hackerspaces
    for i in hackerspaces_json:
        current_lab = Hackerspace()
        current_lab.id = i["id"]
        current_lab.url = "https://hackaday.io/hackerspace/" + current_lab.id
        current_lab.name = i["name"]
        if len(i["description"]) != 0:
            current_lab.description = i["description"]
        elif len(i["summary"]) != 0:
            current_lab.description = i["summary"]
        current_lab.created_at = i["moments"]["exact"]

        # Check if there are coordinates
        if i["latlon"] is not None:
            latlon = json.loads(i["latlon"])
            current_lab.latitude = latlon["lat"]
            current_lab.longitude = latlon["lng"]
            # Get country, county and city from them
            country = geolocator.reverse([latlon["lat"], latlon["lng"]])
            current_lab.country = country.raw["address"]["country"]
            current_lab.address = country.raw["display_name"]
            current_lab.address_1 = country.raw["display_name"]
            current_lab.country_code = country.raw["address"]["country_code"]
            current_lab.county = country.raw["address"]["state_district"]
            current_lab.city = country.raw["address"]["city"]
            current_lab.postal_code = country.raw["address"]["postcode"]
        else:
            # For labs without a location or coordinates, add 0,0 as coordinates
            current_lab.latitude = 0.0
            current_lab.longitude = 0.0

        # Add the lab
        hackerspaces[i["name"]] = current_lab

    # Return a dictionary / json
    if format.lower() == "dict" or format.lower() == "json":
        output = {}
        for j in hackerspaces:
            output[j] = hackerspaces[j].__dict__
    # Return a geojson
    elif format.lower() == "geojson" or format.lower() == "geo":
        labs_list = []
        for l in hackerspaces:
            single = hackerspaces[l].__dict__
            # GeoJSON positions are (longitude, latitude); the original had
            # the two swapped
            single_lab = Feature(
                type="Feature",
                geometry=Point((single["longitude"], single["latitude"])),
                properties=single)
            labs_list.append(single_lab)
        output = dumps(FeatureCollection(labs_list))
    # Return a Pandas DataFrame
    elif format.lower() == "pandas" or format.lower() == "dataframe":
        output = {}
        for j in hackerspaces:
            output[j] = hackerspaces[j].__dict__
        # Transform the dict into a Pandas DataFrame
        output = pd.DataFrame.from_dict(output)
        output = output.transpose()
    # Return an object
    elif format.lower() == "object" or format.lower() == "obj":
        output = hackerspaces
    # Default: return an object
    else:
        output = hackerspaces

    # Return a proper json
    if format.lower() == "json":
        output = json.dumps(output)
    return output
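# A quick sketch of the GeoJSON coordinate-order convention, since swapping
# the two is an easy mistake when an upstream API reports "latlon"; the
# coordinates below are arbitrary.
from geojson import Point

lat, lng = 55.7068, 13.1870    # hypothetical latitude, longitude
p = Point((lng, lat))          # longitude always comes first in GeoJSON
print(p["coordinates"])        # [13.187, 55.7068]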
def response(context, flow):
    with decoded(flow.response):
        if flow.match("~d pgorelease.nianticlabs.com"):
            env = RpcResponseEnvelopeProto()
            env.ParseFromString(flow.response.content)
            key = request_api[env.response_id]
            value = env.returns[0]

            name = Method.Name(key)
            name = mismatched_apis.get(name, name)  # return class name when not the same as method
            klass = underscore_to_camelcase(name) + "OutProto"
            try:
                mor = deserialize(value, "." + klass)
                print("Deserialized Response %s" % name)
            except:
                print("Missing Response API: %s" % name)

            if (key == GET_MAP_OBJECTS):
                features = []
                bulk = []
                for cell in mor.MapCell:
                    for fort in cell.Fort:
                        props = {
                            "id": fort.FortId,
                            "LastModifiedMs": fort.LastModifiedMs,
                        }
                        if fort.FortType == CHECKPOINT:
                            props["marker-symbol"] = "circle"
                            props["title"] = "PokéStop"
                            props["type"] = "pokestop"
                            props["lure"] = fort.HasField('FortLureInfo')
                        else:
                            props["marker-symbol"] = "town-hall"
                            props["marker-size"] = "large"
                            props["type"] = "gym"
                            if fort.Team == BLUE:
                                props["marker-color"] = "0000FF"
                                props["title"] = "Blue Gym"
                            elif fort.Team == RED:
                                props["marker-color"] = "FF0000"
                                props["title"] = "Red Gym"
                            elif fort.Team == YELLOW:
                                props["marker-color"] = "FFFF00"  # was FF0000, a copy-paste slip
                                props["title"] = "Yellow Gym"
                            else:
                                props["marker-color"] = "808080"
                        p = Point((fort.Longitude, fort.Latitude))
                        f = Feature(geometry=p, id=fort.FortId, properties=props)
                        features.append(f)
                        bulk.append(createItem("gym", fort.FortId, p, f.properties))
                    for spawn in cell.SpawnPoint:
                        p = Point((spawn.Longitude, spawn.Latitude))
                        f = Feature(geometry=p, id=len(features), properties={
                            "type": "spawn",
                            "id": len(features),
                            "title": "spawn",
                            "marker-color": "00FF00",
                            "marker-symbol": "garden",
                            "marker-size": "small",
                        })
                        features.append(f)
                        bulk.append(createItem("spawnpoint", 0, p, f.properties))
                    for spawn in cell.DecimatedSpawnPoint:
                        p = Point((spawn.Longitude, spawn.Latitude))
                        f = Feature(geometry=p, id=len(features), properties={
                            "id": len(features),
                            "type": "decimatedspawn",
                            "title": "Decimated spawn",
                            "marker-color": "000000",
                            "marker-symbol": "monument"
                        })
                        features.append(f)
                    for pokemon in cell.WildPokemon:
                        p = Point((pokemon.Longitude, pokemon.Latitude))
                        f = Feature(geometry=p, id="wild" + str(pokemon.EncounterId), properties={
                            "id": "wild" + str(pokemon.EncounterId),
                            "type": "wild",
                            "TimeTillHiddenMs": pokemon.TimeTillHiddenMs,
                            "WillDisappear": pokemon.TimeTillHiddenMs + pokemon.LastModifiedMs,
                            "title": "Wild %s" % Custom_PokemonName.Name(pokemon.Pokemon.PokemonId),
                            "marker-color": "FF0000",
                            "marker-symbol": "suitcase"
                        })
                        features.append(f)
                        bulk.append(createItem("pokemon", pokemon.EncounterId, p, f.properties))
                    for pokemon in cell.CatchablePokemon:
                        p = Point((pokemon.Longitude, pokemon.Latitude))
                        f = Feature(geometry=p, id="catchable" + str(pokemon.EncounterId), properties={
                            "id": "catchable" + str(pokemon.EncounterId),
                            "type": "catchable",
                            "ExpirationTimeMs": pokemon.ExpirationTimeMs,
                            "title": "Catchable %s" % Custom_PokemonName.Name(pokemon.PokedexTypeId),
                            "marker-color": "000000",
                            "marker-symbol": "circle"
                        })
                        features.append(f)
                    for poke in cell.NearbyPokemon:
                        gps = request_location[env.response_id]
                        # collect distinct (lat, lon, distance_km) observations per encounter
                        if poke.EncounterId in pokeLocation:
                            add = True
                            for loc in pokeLocation[poke.EncounterId]:
                                if gps[0] == loc[0] and gps[1] == loc[1]:
                                    add = False
                            if add:
                                pokeLocation[poke.EncounterId].append(
                                    (gps[0], gps[1], poke.DistanceMeters / 1000))
                        else:
                            pokeLocation[poke.EncounterId] = [
                                (gps[0], gps[1], poke.DistanceMeters / 1000)]
                        # three observations are enough to triangulate a position
                        if len(pokeLocation[poke.EncounterId]) >= 3:
                            lat, lon = triangulate(pokeLocation[poke.EncounterId][0],
                                                   pokeLocation[poke.EncounterId][1],
                                                   pokeLocation[poke.EncounterId][2])
                            if not math.isnan(lat) and not math.isnan(lon):
                                p = Point((lon, lat))
                                f = Feature(geometry=p, id="nearby" + str(poke.EncounterId), properties={
                                    "id": "nearby" + str(poke.EncounterId),
                                    "type": "nearby",
                                    "title": "Nearby %s" % Custom_PokemonName.Name(poke.PokedexNumber),
                                    "marker-color": "FFFFFF",
                                    "marker-symbol": "dog-park"
                                })
                                bulk.append(createItem("pokemon", poke.EncounterId, p, f.properties))
                                features.append(f)
                fc = FeatureCollection(features)
                dump = geojson.dumps(fc, sort_keys=True)
                dumpToMap(bulk)
                f = open('ui/get_map_objects.json', 'w')
                f.write(dump)
def response(context, flow):
    with decoded(flow.response):
        if flow.match("~d pgorelease.nianticlabs.com"):
            env = RpcResponseEnvelopeProto()
            env.ParseFromString(flow.response.content)
            key = request_api[env.response_id]
            value = env.returns[0]

            name = Holoholo.Rpc.Method.Name(key)
            name = mismatched_apis.get(name, name)  # return class name when not the same as method
            klass = underscore_to_camelcase(name) + "OutProto"
            try:
                mor = deserialize(value, "." + klass)
                print("Deserialized Response %s" % name)
            except:
                print("Missing Response API: %s" % name)

            if (key == Holoholo.Rpc.GET_MAP_OBJECTS):
                features = []
                for cell in mor.MapCell:
                    for fort in cell.Fort:
                        p = Point((fort.Longitude, fort.Latitude))
                        if fort.FortType == Holoholo.Rpc.CHECKPOINT:
                            f = Feature(geometry=p, id=len(features),
                                        properties={"id": fort.FortId,
                                                    "title": "Pokestop",
                                                    "marker-color": "00007F",
                                                    "marker-symbol": "town-hall"})
                            features.append(f)
                        else:
                            f = None
                            if fort.Team == BLUE:
                                f = Feature(geometry=p, id=len(features),
                                            properties={"id": fort.FortId, "title": "Blue Gym",
                                                        "marker-color": "0000FF",
                                                        "marker-symbol": "town-hall",
                                                        "marker-size": "large"})
                            elif fort.Team == RED:
                                f = Feature(geometry=p, id=len(features),
                                            properties={"id": fort.FortId, "title": "Red Gym",
                                                        "marker-color": "FF0000",
                                                        "marker-symbol": "town-hall",
                                                        "marker-size": "large"})
                            elif fort.Team == YELLOW:
                                f = Feature(geometry=p, id=len(features),
                                            properties={"id": fort.FortId, "title": "Yellow Gym",
                                                        "marker-color": "FFFF00",
                                                        "marker-symbol": "town-hall",
                                                        "marker-size": "large"})
                            else:
                                f = Feature(geometry=p, id=len(features),
                                            properties={"id": fort.FortId, "title": "Neutral Gym",
                                                        "marker-color": "808080",
                                                        "marker-symbol": "town-hall",
                                                        "marker-size": "large"})
                            features.append(f)
                    for spawn in cell.SpawnPoint:
                        p = Point((spawn.Longitude, spawn.Latitude))
                        f = Feature(geometry=p, id=len(features),
                                    properties={"id": len(features), "title": "spawn",
                                                "marker-color": "00FF00",
                                                "marker-symbol": "garden"})
                        features.append(f)
                    for spawn in cell.DecimatedSpawnPoint:
                        p = Point((spawn.Longitude, spawn.Latitude))
                        f = Feature(geometry=p, id=len(features),
                                    properties={"id": len(features), "title": "decimated spawn",
                                                "marker-color": "000000",
                                                "marker-symbol": "monument"})
                        features.append(f)
                    for pokemon in cell.WildPokemon:
                        p = Point((pokemon.Longitude, pokemon.Latitude))
                        f = Feature(geometry=p, id=len(features),
                                    properties={"id": len(features),
                                                "TimeTillHiddenMs": pokemon.TimeTillHiddenMs,
                                                "title": "Wild %s" % Custom_PokemonName.Name(pokemon.Pokemon.PokemonId),
                                                "marker-color": "FF0000",
                                                "marker-symbol": "suitcase"})
                        features.append(f)
                    for pokemon in cell.CatchablePokemon:
                        p = Point((pokemon.Longitude, pokemon.Latitude))
                        f = Feature(geometry=p, id=len(features),
                                    properties={"id": len(features),
                                                "ExpirationTimeMs": pokemon.ExpirationTimeMs,
                                                "title": "Catchable %s" % Custom_PokemonName.Name(pokemon.PokedexTypeId),
                                                "marker-color": "000000",
                                                "marker-symbol": "circle"})
                        features.append(f)
                    for poke in cell.NearbyPokemon:
                        gps = request_location[env.response_id]
                        # collect distinct (lat, lon, distance_km) observations per encounter
                        if poke.EncounterId in pokeLocation:
                            add = True
                            for loc in pokeLocation[poke.EncounterId]:
                                if gps[0] == loc[0] and gps[1] == loc[1]:
                                    add = False
                            if add:
                                pokeLocation[poke.EncounterId].append(
                                    (gps[0], gps[1], poke.DistanceMeters / 1000))
                        else:
                            pokeLocation[poke.EncounterId] = [
                                (gps[0], gps[1], poke.DistanceMeters / 1000)]
                        # three observations are enough to triangulate a position
                        if len(pokeLocation[poke.EncounterId]) >= 3:
                            lat, lon = triangulate(pokeLocation[poke.EncounterId][0],
                                                   pokeLocation[poke.EncounterId][1],
                                                   pokeLocation[poke.EncounterId][2])
                            if not math.isnan(lat) and not math.isnan(lon):
                                p = Point((lon, lat))
                                f = Feature(geometry=p, id=len(features),
                                            properties={"id": len(features),
                                                        "title": "Nearby %s" % Custom_PokemonName.Name(poke.PokedexNumber),
                                                        "marker-color": "FFFFFF",
                                                        "marker-symbol": "dog-park"})
                                features.append(f)
                fc = FeatureCollection(features)
                dump = geojson.dumps(fc, sort_keys=True)
                f = open('ui/get_map_objects.json', 'w')
                f.write(dump)
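# The triangulate() helper used above is not shown in the snippet; this is a
# minimal trilateration sketch under the assumption that, over these short
# distances, lat/lon can be treated as planar coordinates. Each observation
# is (lat, lon, distance_km), matching the tuples built in the loop, and
# distances are converted to degrees with a rough 111 km-per-degree factor.
def triangulate(obs1, obs2, obs3):
    (y1, x1, r1), (y2, x2, r2), (y3, x3, r3) = obs1, obs2, obs3
    r1, r2, r3 = r1 / 111.0, r2 / 111.0, r3 / 111.0  # km -> approx. degrees
    # subtracting pairs of circle equations (x-xi)^2 + (y-yi)^2 = ri^2
    # cancels the quadratic terms and leaves a 2x2 linear system
    a1, b1 = 2 * (x2 - x1), 2 * (y2 - y1)
    c1 = r1**2 - r2**2 + x2**2 - x1**2 + y2**2 - y1**2
    a2, b2 = 2 * (x3 - x2), 2 * (y3 - y2)
    c2 = r2**2 - r3**2 + x3**2 - x2**2 + y3**2 - y2**2
    det = a1 * b2 - a2 * b1
    if det == 0:
        # collinear observation points; NaN matches the caller's isnan check
        return float('nan'), float('nan')
    x = (c1 * b2 - c2 * b1) / det
    y = (a1 * c2 - a2 * c1) / det
    return y, x  # (lat, lon)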
def run_route(start_node_id, end_node_id, route_type):
    '''
    :param start_node_id: network node id to route from
    :param end_node_id: network node id to route to
    :param route_type: "1" to exclude stairs (barrier-free routing),
        anything else to use all network edges
    :return: a GeoJSON FeatureCollection of route segments
    '''
    cur = connection.cursor()
    base_route_q = """SELECT ogc_fid AS id, source, target,
                     total_cost AS cost, layer, type_id
                     FROM geodata.networklines_3857"""

    # set default query
    barrierfree_q = "WHERE 1=1"
    if route_type == "1":
        # exclude all networklines of type stairs
        barrierfree_q = "WHERE type_id not in (3,4)"

    routing_query = '''
        SELECT seq, id1 AS node, id2 AS edge,
               ST_Length(wkb_geometry) AS cost, layer,
               type_id, ST_AsGeoJSON(wkb_geometry) AS geoj
        FROM pgr_dijkstra('
            {normal} {type}', %s, %s, FALSE, FALSE
        ) AS dij_route
        JOIN geodata.networklines_3857 AS input_network
          ON dij_route.id2 = input_network.ogc_fid;
    '''.format(normal=base_route_q, type=barrierfree_q)

    # run our shortest path query
    if start_node_id or end_node_id:
        cur.execute(routing_query, (start_node_id, end_node_id))
    else:
        logger.error("start or end node is None " + str(start_node_id))
        return HttpResponseNotFound('<h1>Sorry NO start or end node'
                                    ' found within 200m</h1>')

    # get entire query results to work with
    route_segments = cur.fetchall()

    # empty list to hold each segment for our GeoJSON output
    route_result = []

    # loop over each segment in the result route segments
    # to create the list of our new GeoJSON features
    for segment in route_segments:
        seg_cost = segment[3]      # cost value
        layer_level = segment[4]   # floor number
        seg_type = segment[5]
        geojs = segment[6]         # geojson coordinates
        geojs_geom = loads(geojs)  # load string to geom
        geojs_feat = Feature(geometry=geojs_geom,
                             properties={'floor': layer_level,
                                         'length': seg_cost,
                                         'type_id': seg_type})
        route_result.append(geojs_feat)

    # using the geojson module to create our GeoJSON Feature Collection
    geojs_fc = FeatureCollection(route_result)
    return geojs_fc
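# A minimal sketch of serving run_route from a Django view; the view name and
# URL parameters are hypothetical, and the error branch mirrors the
# HttpResponseNotFound that run_route can return.
from django.http import JsonResponse, HttpResponseNotFound

def route_view(request, start_id, end_id):
    result = run_route(start_id, end_id, request.GET.get("type", "0"))
    if isinstance(result, HttpResponseNotFound):
        return result
    # FeatureCollection subclasses dict, so it serialises directly
    return JsonResponse(result, safe=False)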
def response(context, flow):
    with decoded(flow.response):
        if flow.match("~d pgorelease.nianticlabs.com"):
            env = RpcResponseEnvelopeProto()
            env.ParseFromString(flow.response.content)
            key = request_api[env.response_id]
            value = env.returns[0]
            if (key == GET_MAP_OBJECTS):
                mor = GetMapObjectsOutProto()
                mor.ParseFromString(value)
                print("GET_MAP_OBJECTS %i cells" % len(mor.cells))
                features = []
                for cell in mor.cells:
                    print("S2 Cell %i" % cell.S2CellId)
                    for fort in cell.Fort:
                        p = Point((fort.Longitude, fort.Latitude))
                        if fort.FortType == 1:
                            f = Feature(geometry=p, id=len(features),
                                        properties={"id": fort.FortId,
                                                    "title": "Pokestop",
                                                    "marker-color": "00007F",
                                                    "marker-symbol": "town-hall"})
                            features.append(f)
                        else:
                            f = None
                            if fort.Team == BLUE:
                                f = Feature(geometry=p, id=len(features),
                                            properties={"id": fort.FortId, "title": "Blue Gym",
                                                        "marker-color": "0000FF",
                                                        "marker-symbol": "town-hall",
                                                        "marker-size": "large"})
                            elif fort.Team == RED:
                                f = Feature(geometry=p, id=len(features),
                                            properties={"id": fort.FortId, "title": "Red Gym",
                                                        "marker-color": "FF0000",
                                                        "marker-symbol": "town-hall",
                                                        "marker-size": "large"})
                            elif fort.Team == YELLOW:
                                f = Feature(geometry=p, id=len(features),
                                            properties={"id": fort.FortId, "title": "Yellow Gym",
                                                        "marker-color": "FFFF00",
                                                        "marker-symbol": "town-hall",
                                                        "marker-size": "large"})
                            else:
                                f = Feature(geometry=p, id=len(features),
                                            properties={"id": fort.FortId, "title": "Unclaimed Gym",
                                                        "marker-color": "808080",
                                                        "marker-symbol": "town-hall",
                                                        "marker-size": "large"})
                            features.append(f)
                    for spawn in cell.SpawnPoint:
                        p = Point((spawn.Longitude, spawn.Latitude))
                        f = Feature(geometry=p, id=len(features),
                                    properties={"title": "spawn",
                                                "marker-color": "00FF00",
                                                "marker-symbol": "garden"})
                        features.append(f)
                    for spawn in cell.DecimatedSpawnPoint:
                        p = Point((spawn.Longitude, spawn.Latitude))
                        f = Feature(geometry=p, id=len(features),
                                    properties={"title": "decimated spawn",
                                                "marker-color": "000000",
                                                "marker-symbol": "monument"})
                        features.append(f)
                    for pokemon in cell.WildPokemon:
                        p = Point((pokemon.Longitude, pokemon.Latitude))
                        f = Feature(geometry=p, id=len(features),
                                    properties={"title": "Wild pokemon: %i" % pokemon.Pokemon,
                                                "type": "wild pokemon",
                                                "marker-color": "FF0000",
                                                "marker-symbol": "suitcase"})
                        features.append(f)
                    for pokemon in cell.CatchablePokemon:
                        p = Point((pokemon.Longitude, pokemon.Latitude))
                        f = Feature(geometry=p, id=len(features),
                                    properties={"title": "Catchable pokemon: %i" % pokemon.PokedexTypeId,
                                                "type": "catchable pokemon",
                                                "marker-color": "000000",
                                                "marker-symbol": "circle"})
                        features.append(f)
                    for poke in cell.NearbyPokemon:
                        gps = request_location[env.response_id]
                        if poke.EncounterId in pokeLocation:
                            add = True
                            for loc in pokeLocation[poke.EncounterId]:
                                if gps[0] == loc[0] and gps[1] == loc[1]:
                                    add = False
                            if add:
                                pokeLocation[poke.EncounterId].append(
                                    (gps[0], gps[1], poke.DistanceMeters / 1000))
                        else:
                            pokeLocation[poke.EncounterId] = [
                                (gps[0], gps[1], poke.DistanceMeters / 1000)]
                        if len(pokeLocation[poke.EncounterId]) >= 3:
                            lat, lon = triangulate(pokeLocation[poke.EncounterId][0],
                                                   pokeLocation[poke.EncounterId][1],
                                                   pokeLocation[poke.EncounterId][2])
                            if not math.isnan(lat) and not math.isnan(lon):
                                p = Point((lon, lat))
                                f = Feature(geometry=p, id=len(features),
                                            properties={"title": "nearby pokemon",
                                                        "marker-color": "FFFFFF",
                                                        "marker-symbol": "dog-park"})
                                features.append(f)
                fc = FeatureCollection(features)
                dump = geojson.dumps(fc, sort_keys=True)
                f = open('ui/get_map_objects.json', 'w')
                f.write(dump)
            elif (key == FORT_DETAILS):
                mor = FortDetailsOutProto()
                mor.ParseFromString(value)
                print(mor)
            elif (key == FORT_SEARCH):
                mor = FortSearchOutProto()
                mor.ParseFromString(value)
                print(mor)
            else:
                print("API: %s" % key)