def search_source_locations_as_geojson(
    vial_http: urllib3.connectionpool.ConnectionPool,
    **kwds: Any,
) -> Iterator[geojson.Feature]:
    """Wrapper around the search source locations API. Yields geojson.Feature objects."""
    params = {
        **kwds,
        "format": "nlgeojson",
    }
    query = urllib.parse.urlencode(params)
    path_and_query = f"/api/searchSourceLocations?{query}"

    logger.info("Contacting VIAL: GET %s", path_and_query)
    resp = vial_http.request("GET", path_and_query, preload_content=False)

    line_num = 0
    for line_num, line in enumerate(resp, start=1):
        if line_num % 5000 == 0:
            logger.info("Processed %d source location records from VIAL.", line_num)

        try:
            record = orjson.loads(line)
        except json.JSONDecodeError as e:
            logger.warning(
                "Invalid json record in source search response: %s\n%s", line, str(e)
            )
            continue

        _clean_geojson_record(record)

        try:
            feature = geojson.Feature(**record)
        except ValueError as e:
            logger.warning(
                "Invalid geojson record in source search response: %s\n%s", line, str(e)
            )
            continue

        yield feature

    logger.info("Processed %d total source location records from VIAL.", line_num)

    resp.release_conn()
def _create_split_tasks_from_geometry(task) -> list:
    """
    Splits a task into 4 smaller tasks based purely on the task's geometry
    rather than an OSM tile identified by x, y, zoom
    :return: list of geojson.Feature
    """
    # Load the task's geometry and calculate its centroid and bbox
    query = db.session.query(
        Task.id,
        Task.geometry.ST_AsGeoJSON().label("geometry")).filter(
            Task.id == task.id, Task.project_id == task.project_id)
    task_geojson = geojson.loads(query[0].geometry)
    geometry = shapely_shape(task_geojson)
    centroid = geometry.centroid
    minx, miny, maxx, maxy = geometry.bounds

    # split geometry in half vertically, then split those halves in half horizontally
    split_geometries = []
    vertical_dividing_line = LineString([(centroid.x, miny), (centroid.x, maxy)])
    horizontal_dividing_line = LineString([(minx, centroid.y), (maxx, centroid.y)])

    vertical_halves = SplitService._as_halves(
        split(geometry, vertical_dividing_line), centroid, "x")
    for half in vertical_halves:
        split_geometries += SplitService._as_halves(
            split(half, horizontal_dividing_line), centroid, "y")

    # convert split geometries into GeoJSON features expected by Task
    split_features = []
    for split_geometry in split_geometries:
        feature = geojson.Feature()
        # Tasks expect multipolygons. Convert and use the database to get as GeoJSON
        multipolygon_geometry = shape.from_shape(split_geometry, 4326)
        feature.geometry = geojson.loads(
            db.engine.execute(
                multipolygon_geometry.ST_AsGeoJSON()).scalar())
        feature.properties["x"] = None
        feature.properties["y"] = None
        feature.properties["zoom"] = None
        feature.properties["isSquare"] = False
        split_features.append(feature)

    return split_features
def csv_to_json(csv_file, start_row=1, end_row=400, start_path=1, end_path=373):
    """
    Dumps scene geojson from path/row information

    Input:
    start_row, end_row: only rows within this range are processed
    start_path, end_path: only paths within this range are processed
    """
    features = []
    center_lat = None
    center_lon = None
    with open(csv_file, 'r') as fcsv:
        for line in fcsv:
            line = line[:-1]  # strip trailing newline
            # print(line)
            fields = line.split(',')
            if fields[0] == 'PATH':
                # Header
                continue
            path = int(fields[0])
            row = int(fields[1])
            previous_center_lat = center_lat
            previous_center_lon = center_lon
            center_lat = float(fields[2])
            center_lon = float(fields[3])
            # Skip configured paths and rows
            if row < start_row or row > end_row:
                continue
            if path < start_path or path > end_path:
                continue
            # print(fields)
            pol = center_to_scene_boundaries(center_lat, center_lon,
                                             previous_center_lat,
                                             previous_center_lon)
            if pol:
                feature = geojson.Feature(geometry=pol,
                                          properties={"PATH": path, "ROW": row})
                features.append(feature)

    fc = geojson.FeatureCollection(features)
    print(geojson.dumps(fc))
def parsePoint(inputCSV):
    myFeatures = []

    # Check column names
    LineCN = {'name', 'sort', 'zip', 'address', 'Longitude', 'Latitude'}
    for cn in LineCN:
        if cn not in inputCSV.columns:
            print("EE CSV Column ERROR : %s Not Found." % (cn))
            sys.exit()

    for index, row in inputCSV.iterrows():
        # Set properties except geometry
        myProperties = {}
        for col in inputCSV.columns:
            value = row[col]
            if col == "Longitude":
                lon = float(value)
                continue
            if col == "Latitude":
                lat = float(value)
                continue
            if col == "Altitude":
                # Ignore Altitude
                continue
            # Check NaN -> ""
            if value != value:
                value = ""
            if col == "No" and type(value) is float:
                myProperties[col] = str(int(value))
            else:
                myProperties[col] = str(value)

        myPoint = geojson.Point([lon, lat])

        # Append point data
        myFeatures.append(
            geojson.Feature(id=index, properties=myProperties, geometry=myPoint))

    myFeatureCollection = geojson.FeatureCollection(myFeatures)

    return myFeatureCollection
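# A minimal usage sketch for parsePoint above (not part of the original source).
# It assumes geojson and pandas are installed and that the function is defined in
# the same module; the DataFrame below is made-up sample data with the columns
# the function checks for.
import geojson
import pandas as pd

sample = pd.DataFrame(
    [{"name": "Cafe A", "sort": 1, "zip": "100-0001", "address": "1-1 Chiyoda",
      "Longitude": 139.7528, "Latitude": 35.6852}],
    index=["0"])  # string index so the Feature id stays JSON serializable

fc = parsePoint(sample)
print(geojson.dumps(fc, indent=2))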
def get_map_for_coordinates(coordinates):
    ACCESS_TOKEN = os.getenv("MAPBOX_ACCESS_TOKEN")
    USERNAME = os.getenv("MAPBOX_STYLE_USER")
    STYLE_ID = os.getenv("MAPBOX_STYLE_ID")

    service = StaticStyle()
    multipoint = geojson.MultiPoint(coordinates)
    feature = geojson.Feature(geometry=multipoint)
    print(feature)
    response = service.image(username=USERNAME,
                             style_id=STYLE_ID,
                             features=[feature],
                             width=1200,
                             height=1200,
                             retina=True)
    print(response.status_code)

    with open(args.directory + '/_map.png', 'wb') as output:
        _ = output.write(response.content)
def geojson_nodes(overpass_json):
    features = []
    geometry = None
    for elem in overpass_json['elements']:
        elem_type = elem.get("type")
        if elem_type and elem_type == "node":
            geometry = geojson.Point((elem.get("lon"), elem.get("lat")))
            feature = geojson.Feature(id=elem['id'],
                                      geometry=geometry,
                                      properties=elem.get("tags"))
            features.append(feature)
    return geojson.FeatureCollection(features)
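# A minimal usage sketch for geojson_nodes above (not part of the original source).
# The function only needs a dict shaped like an Overpass API JSON response, i.e.
# an "elements" list of node objects; the response below is hand-made sample data.
import geojson

sample_overpass_json = {
    "elements": [
        {"type": "node", "id": 1, "lon": 13.3889, "lat": 52.5170,
         "tags": {"amenity": "cafe"}},
        {"type": "way", "id": 2, "nodes": [1]},  # non-node elements are skipped
    ]
}
fc = geojson_nodes(sample_overpass_json)
print(geojson.dumps(fc, indent=2))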
def _row_to_feature(self, row, projections):
    # col_map example: {'id': 0, 'geom': 1, 'properties': [('digest', 2), ('name', 3), ('type', 4)]}
    # convert the row to a GeoJSON feature
    fid = None
    geometry = None
    properties = {}
    for i in range(len(projections)):
        proj = projections[i]
        if proj["type"] == "id":
            fid = row[i]
        elif proj["type"] == "geometry":
            geometry = geojson.loads(row[i])
        else:
            properties[proj["name"]] = row[i]

    return geojson.Feature(id=fid, geometry=geometry, properties=properties)
def features(self, poly, point_3d):
    self.poly = poly
    self.point_3d = point_3d

    division = multiprocessing.cpu_count()
    index = list(range(0, len(poly)))
    print(len(poly))
    self.maximum_points = len(index) // division + 1

    pool = multiprocessing.Pool(processes=multiprocessing.cpu_count())
    result = pool.map(
        self.calc, range(division),
        chunksize=1)  # process data_inputs iterable with pool

    height = []
    for divo in range(division):
        height = height + result[divo]
        print(divo, len(result[divo]))
    print(len(height), len(poly))

    final = {"type": "FeatureCollection", "features": []}
    for i in range(len(poly)):
        geojson_out = geojson.Feature(geometry=poly[i])
        feature = {
            "type": "Feature",
            "geometry": {
                "type": "Polygon",
                "coordinates": []
            },
            "properties": {
                "id": i,
                "height": height[i]
            }
        }
        feature['geometry'] = geojson_out.geometry
        final['features'].append(feature)

    with open('data/' + folder_name + '/jsons/buildings.json', 'w') as outfile:
        json.dump(final, outfile)
    return
def _parse_track_gpx(self, b):
    gpx = gpxpy.parse(b.decode('utf-8'))

    name_re = re.compile(r'^((?:CA|OR|WA) Sec [A-Z])(?: - (.+))?$')

    features = []
    for track in gpx.tracks:
        assert len(track.segments) == 1, 'More than 1 segment in GPX track'

        l = LineString([(x.longitude, x.latitude, x.elevation)
                        for x in track.segments[0].points])

        section_name, alt_name = name_re.match(track.name).groups()
        name = alt_name if alt_name else section_name
        alternate = bool(alt_name)
        properties = {'name': name, 'alternate': alternate}
        features.append(geojson.Feature(geometry=l, properties=properties))

    return geojson.FeatureCollection(features)
def combine_predictions_and_segments(predictions, segments):
    """
    Combine predictions data with certain properties of their related segment.
    """
    print("combining predictions with segments")
    combined_preds = []

    # turn segments into a dict for quick lookup
    segments_dict = {str(segment["id"]): segment for segment in segments}

    for pred_data in predictions:
        segment = segments_dict[str(pred_data["segment_id"])]

        prop = {
            "prediction": pred_data["prediction"],
            "crash": pred_data["crash"],
            "segment_id": pred_data["segment_id"]
        }
        # Eventually handle osm_speed vs SPEEDLIMIT as part
        # of the configuration
        if 'SPEEDLIMIT' in pred_data:
            prop['SPEEDLIMIT'] = pred_data['SPEEDLIMIT']
        elif 'osm_speed' not in pred_data:
            prop['osm_speed'] = 0
        else:
            prop['osm_speed'] = pred_data['osm_speed']

        prop["segment"] = {
            "id": str(segment["id"]),
            "display_name": segment["properties"]["display_name"],
            "center_x": segment["properties"]["center_x"],
            "center_y": segment["properties"]["center_y"]
        }

        combined_preds.append(
            geojson.Feature(geometry=segment["geometry"], properties=prop))

    # Sort highest risk to lowest risk
    combined_preds = sorted(combined_preds,
                            key=lambda x: x['properties']['prediction'],
                            reverse=True)
    return combined_preds
def get_tasks_as_geojson_feature_collection(project_id):
    """
    Creates a geojson.FeatureCollection object for all tasks related to the supplied project ID
    :param project_id: Owning project ID
    :return: geojson.FeatureCollection
    """
    project_tasks = \
        db.session.query(Task.id, Task.x, Task.y, Task.zoom, Task.splittable, Task.task_status,
                         Task.geometry.ST_AsGeoJSON().label('geojson')) \
            .filter(Task.project_id == project_id).all()

    tasks_features = []
    for task in project_tasks:
        task_geometry = geojson.loads(task.geojson)
        task_properties = dict(taskId=task.id, taskX=task.x, taskY=task.y, taskZoom=task.zoom,
                               taskSplittable=task.splittable,
                               taskStatus=TaskStatus(task.task_status).name)

        feature = geojson.Feature(geometry=task_geometry, properties=task_properties)
        tasks_features.append(feature)

    return geojson.FeatureCollection(tasks_features)
def data2geojson(df):
    features = []
    insert_features = lambda X: features.append(
        geojson.Feature(geometry=geojson.LineString(
            ([X["lead_lon"], X["lead_lat"]],
             [X["lon"], X["lat"]])),
            properties=dict(mmsi=X["mmsi"],
                            operator=X["operator"],
                            speed_knots=X["speed_knots"],
                            implied_speed_knots=X["implied_speed_knots"],
                            calculated_knots=X["calculated_knots"])))
    df.apply(insert_features, axis=1)

    geojson_obj = geojson.dumps(geojson.FeatureCollection(features),
                                indent=2,
                                sort_keys=True,
                                ensure_ascii=False)
    return geojson_obj
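# A minimal usage sketch for data2geojson above (not part of the original source).
# It assumes geojson and pandas are installed; the column names mirror the ones
# the lambda reads, and the single row below is made-up sample data (mmsi kept as
# a string so every property value serializes cleanly to JSON).
import geojson
import pandas as pd

sample = pd.DataFrame([{
    "lead_lon": -122.40, "lead_lat": 47.60, "lon": -122.41, "lat": 47.61,
    "mmsi": "367000001", "operator": "Sample Ferries", "speed_knots": 12.0,
    "implied_speed_knots": 11.8, "calculated_knots": 11.9,
}])
print(data2geojson(sample))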
def way(self, w):
    if not w.is_closed():
        return

    if 'amenity' not in w.tags or w.tags['amenity'] != 'parking':
        return

    if 'parking' in w.tags:
        if w.tags['parking'] in self.parking_filter:
            return

    geometry = geojson.Polygon([[(n.lon, n.lat) for n in w.nodes]])
    shape = shapely.geometry.shape(geometry)
    if shape.is_valid:
        feature = geojson.Feature(geometry=geometry)
        self.features.append(feature)
    else:
        print('Warning: invalid feature: https://www.openstreetmap.org/way/{}'.format(w.id),
              file=sys.stderr)
def get_geojson_for_way(self, way_id):
    """Construct GeoJSON Feature with LineString geometry for way

    Args:
        - way_id: OSM way id

    Returns:
        geojson.Feature with LineString geometry of way
    """
    way_info = self.get_info(way=way_id)
    node_ids = self.get_node_ids_for_way(way_id)

    points = []
    for node_id in node_ids:
        node_info = self.get_info(node=node_id)
        points.append([node_info['lon'], node_info['lat']])

    line = geojson.LineString(points)
    return geojson.Feature(id=way_id, geometry=line, properties=way_info)
def tweets():
    features = []
    logger.info("returning listener queue of length %s", len(app.listener.status_queue))
    for status in app.listener.status_queue:
        properties = {
            "text": status.text,
            "source": status.source,
            "user": {
                "name": status.user.name,
                "id": status.user.id
            }
        }
        feature = geojson.Feature(id=status.id,
                                  geometry=status.coordinates,
                                  properties=properties)
        features.append(feature)

    feature_collection = geojson.FeatureCollection(features)
    return jsonify(feature_collection)
def crossingsv2():
    table = models.Crossings

    bbox = request.args.get('bbox')
    all_rows = request.args.get('all')

    geojson_query = gfunc.ST_AsGeoJSON(table.geom, 7)
    geojson_geom = geojson_query.label('geom')

    if all_rows == 'true':
        select = db.session.query(table.id, geojson_geom, table.grade,
                                  table.curbramps)
        result = select.all()
    else:
        if not bbox:
            select = db.session.query(table.id, geojson_geom, table.grade,
                                      table.curbramps)
            result = select.limit(10).all()
        else:
            bounds = [float(b) for b in bbox.split(',')]
            in_bbox = sql_utils.in_bbox(table.geom, bounds)
            select = db.session.query(table.id, geojson_geom, table.grade,
                                      table.curbramps)
            result = select.filter(in_bbox).all()

    fc = geojson.FeatureCollection([])
    for row in result:
        feature = geojson.Feature()
        geometry = json.loads(row.geom)
        for i, lonlat in enumerate(geometry['coordinates']):
            lon = round(lonlat[0], 7)
            lat = round(lonlat[1], 7)
            geometry['coordinates'][i] = [lon, lat]
        feature['geometry'] = geometry
        feature['properties'] = {'id': row.id,
                                 'grade': str(round(row.grade, 3)),
                                 'curbramps': row.curbramps}
        fc['features'].append(feature)

    return jsonify(fc)
def create_hexagons(centers, bins, radius):
    mproj = pyproj.Proj("+init=EPSG:3857")  # Web Mercator
    h0 = hexagon(radius)
    features = []
    for (k, c) in sorted(centers.items()):  # need to know the i,j locations
        # project back to lon, lat
        h = [mproj(pt[0] + c[0], pt[1] + c[1], inverse=True) for pt in h0]
        h.append(h[0])
        f = geojson.Feature(id=str(k),
                            geometry=geojson.Polygon([h]),
                            properties={'count': bins[k]})
        features.append(f)
    fc = geojson.FeatureCollection(features)
    # with open(fname, "w") as f:
    #     geojson.dump(fc, f)
    return fc
def geojsonfeature(obj, srid=None):
    if obj is None or isinstance(obj, str):
        return 'null'

    if srid is None:
        # Try to guess the SRID from potential settings
        srid = getattr(settings, 'API_SRID',
                       getattr(settings, 'MAP_SRID',
                               getattr(settings, 'SRID', 4326)))

    geojsonvalue = ''
    if isinstance(obj, (GEOSGeometry, GeometryField)):
        if obj.srid != srid:
            obj.transform(srid)
        feature = geojson.Feature(geometry=simplejson.loads(obj.geojson))
        geojsonvalue = geojson.dumps(feature)
    else:
        serializer = Serializer()
        geojsonvalue = serializer.serialize([obj], fields=[], srid=srid)
    return geojsonvalue
def companyToGeoJson(companyRow):
    """Build a geojson.Feature from a pandas row produced by df.iterrows()."""
    coord = localizeAdress(companyRow["street"], companyRow["postalCode"],
                           companyRow["area"])
    if coord:
        if isinstance(companyRow["branch"], str) and companyRow["branch"]:
            branches = list(set(companyRow["branch"].split(';')))
        else:
            branches = []
        properties = {
            "name": companyRow["name"],
            "branch": branches,
            "city": companyRow["area"],
            "postalcode": companyRow["postalCode"],
            "street": str(companyRow["street"])
        }
        return geojson.Feature(geometry=coord, properties=properties)
    else:
        return None
def indexes_to_geojson(ctx):
    '''Read H3 indexes from stdin and print them as a GeoJSON FeatureCollection.'''
    data = sys.stdin.readlines()
    indexes = [index.replace('\n', '') for index in data]

    features = []
    for index in indexes:
        if not index:
            continue
        geometry = geojson.Polygon(
            [h3.h3_to_geo_boundary(h3_address=index, geo_json=True)])
        logger.debug(f"geometry for '{index}': {geometry}")
        feature = geojson.Feature(geometry=geometry, id=index,
                                  properties={"index": index})
        features.append(feature)

    features.sort(key=lambda feature: feature["id"])
    feat_collection = geojson.FeatureCollection(features)

    print(geojson.dumps(feat_collection, indent=4))
def as_geojson(self):
    if self.latitude is None or self.longitude is None:
        kw = {}
    else:
        kw = {'geometry': geojson.Point((self.longitude, self.latitude))}
    return geojson.Feature(
        properties={
            "name": self.name,
            "language": self.name,
            "family": self.family,
            "area": self.macroarea,
            "variety": "std",
            "key": self.gid,
            "glottocode": self.glottocode,
            "source": self.source,
            "lon": self.longitude,
            "lat": self.latitude,
        },
        **kw)
def get_location_geojson(loc_lst):
    feature_lst = []
    for loc in loc_lst:
        lat = loc.lat
        lng = loc.lng
        light_val = int(get_pixel_val(lat, lng))
        loc_point = geojson.Point((lng, lat))
        loc_json = geojson.Feature(geometry=loc_point,
                                   properties={
                                       "id": loc.loc_id,
                                       "name": loc.loc_name,
                                       "light": light_val
                                   })
        feature_lst.append(loc_json)

    locs_json = geojson.FeatureCollection(feature_lst)
    return locs_json
def shapelyToFoliumFeature(row):
    """Converts a shapely feature to a folium (leaflet) feature.
    The row needs to have a geometry column. CRS is 4326.

    Args:
        row (geoPandas.GeoDataFrame row) : the input geodataframe row.
            Apply this function to a geodataframe with gdf.apply(function, 1)

    Returns:
        foliumFeature (folium feature) : foliumFeature with popup child.
    """
    width, height = 310, 110
    dfTemp = pd.DataFrame(row.drop("geometry"))
    htmlTable = dfTemp.to_html()
    iFrame = branca.element.IFrame(htmlTable, width=width, height=height)
    geoJSONfeature = geojson.Feature(geometry=row["geometry"], properties={})
    foliumFeature = folium.features.GeoJson(geoJSONfeature)
    foliumFeature.add_child(folium.Popup(iFrame))
    return foliumFeature
def prepare_geojson(elements):
    """
    Prepares a list of elements to be written as geojson,
    reprojecting from 3857 to 4326
    Args:
        elements - a list of dicts with geometry and properties
    Results:
        A geojson feature collection
    """
    elements = reproject_records(elements, transformer_3857_to_4326)
    results = [
        geojson.Feature(
            geometry=mapping(x['geometry']),
            id=x['properties']['id'] if 'id' in x['properties'] else '',
            properties=x['properties'])
        for x in elements
    ]
    return geojson.FeatureCollection(results)
def shape_to_geojson(shape, destination="derived/"):
    """Convert a shapefile to GeoJSON."""
    reader = shapefile.Reader(shape)
    fields = reader.fields[1:]
    names = [field[0] for field in fields]
    buffer = []
    for record in reader.shapeRecords():
        properties = dict(zip(names, record.record))
        geometry = record.shape.__geo_interface__
        buffer.append(geojson.Feature(geometry=geometry, properties=properties))
    collection = geojson.FeatureCollection(buffer)
    name = get_file_name(shape) + ".geojson"
    with open(get_write_path(destination, name), "w") as write:
        geojson.dump(collection, write)
def convert_geometrycollection(wkt):
    wkt_obj = shapely.wkt.loads(wkt)

    # Get all geom types inside the GeometryCollection.
    geom_types = [geom.geom_type for geom in wkt_obj.geoms]

    # Tally all the types found in this GeometryCollection.
    # Homogeneous GeometryCollections will only have one type to tally.
    tally = {}
    for geom_type in geom_types:
        tally[geom_type] = geom_types.count(geom_type)

    # If there ever were a heterogeneous GeometryCollection, the intention here
    # is to find what geometry type is used most often and make a new homogeneous
    # geometry based on the most predominant geometry type. There does not
    # appear to be any heterogeneous GeometryCollections in the Alaska EPSCoR
    # metadata, however, so this has only been tested against homogeneous
    # GeometryCollections.
    predominant_type = max(tally, key=tally.get)

    # Pull geometries out of their GeometryCollection container in whatever
    # way is most appropriate.
    if predominant_type == 'Point':
        if tally['Point'] > 1:
            valid_wkt = shapely.geometry.MultiPoint(wkt_obj.geoms)
        else:
            valid_wkt = shapely.geometry.Point(wkt_obj.geoms[0])
    elif predominant_type == 'LineString':
        if tally['LineString'] > 1:
            valid_wkt = shapely.geometry.MultiLineString(wkt_obj.geoms)
        else:
            valid_wkt = shapely.geometry.LineString(wkt_obj.geoms[0])
    elif predominant_type == 'Polygon':
        if tally['Polygon'] > 1:
            valid_wkt = shapely.geometry.MultiPolygon(wkt_obj.geoms)
        else:
            valid_wkt = shapely.geometry.Polygon(wkt_obj.geoms[0])
    elif predominant_type in ['MultiPoint', 'MultiLineString', 'MultiPolygon']:
        valid_wkt = wkt_obj.geoms[0]

    # Return as a GeoJSON string.
    geom_json = geojson.Feature(geometry=valid_wkt, properties={})
    return json.dumps(geom_json['geometry'])
def to_geojson(products):
    """Return the products from a query response as a GeoJSON with the values in
    their appropriate Python types.
    """
    feature_list = []
    for i, (product_id, props) in enumerate(products.items()):
        props = props.copy()
        props['id'] = product_id
        poly = geomet.wkt.loads(props['footprint'])
        del props['footprint']
        del props['gmlfootprint']
        # Fix "'datetime' is not JSON serializable"
        for k, v in props.items():
            if isinstance(v, (date, datetime)):
                props[k] = v.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        feature_list.append(
            geojson.Feature(geometry=poly, id=i, properties=props)
        )
    return geojson.FeatureCollection(feature_list)
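# A minimal usage sketch for to_geojson above (not part of the original source).
# It assumes geojson and geomet are installed and that the function is defined in
# the same module; the mapping of product id to metadata below is made-up sample
# data in the shape the function reads (a WKT footprint plus a gmlfootprint key).
from datetime import date, datetime

import geojson
import geomet.wkt

sample_products = {
    "a1b2c3d4": {
        "title": "SAMPLE_PRODUCT",
        "beginposition": datetime(2020, 1, 1, 10, 30),
        "footprint": "POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))",
        "gmlfootprint": "<gml:Polygon>...</gml:Polygon>",
    }
}
print(geojson.dumps(to_geojson(sample_products), indent=2))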
def _get_collection(elements):
    """Parse overpass json into geojson - multipolygon is not implemented"""
    features = []
    for element in elements:
        if element.get('tags') is not None:
            if element.get('type') == 'node':
                geom = geojson.Point(coordinates=[element['lon'], element['lat']])
                features.append(geojson.Feature(element['id'], geom, element['tags']))
            elif element.get('type') == 'way' and element.get("nodes")[0] != element.get("nodes")[-1]:
                features.append(ParseOSM._geojson_feature(element, 'LineString'))
            elif element.get('type') == 'way' and element.get("nodes")[0] == element.get("nodes")[-1]:
                if any(ParseOSM._area_check(key, value) for key, value in element.get('tags').items()):
                    # create polygon feature from closed way after passing the test on polygon tags
                    features.append(ParseOSM._geojson_feature(element, 'Polygon'))
                else:
                    features.append(ParseOSM._geojson_feature(element, 'LineString'))
            else:
                print(f'problem with {element.get("type")} and id no {element["id"]}')
    return geojson.FeatureCollection(features)
def save_polys_as_json(polys, name):
    geoms = {}
    geoms['features'] = []
    geoms['crs'] = {
        'properties': {
            'name': 'urn:ogc:def:crs:EPSG::32611'
        },
        'type': 'name'
    }
    geoms['type'] = 'FeatureCollection'
    for i in range(len(polys)):
        geom_in_geojson = geojson.Feature(geometry=mapping(polys[i]), properties={})
        geoms['features'].append(geom_in_geojson)
    with open(name, 'w') as dst:
        json.dump(geoms, dst)
def get_tasks_as_geojson_feature_collection_no_geom(project_id):
    """
    Creates a geojson.FeatureCollection object for all tasks related to the supplied project ID
    without geometry
    :param project_id: Owning project ID
    :return: geojson.FeatureCollection
    """
    project_tasks = \
        db.session.query(Task.id, Task.x, Task.y, Task.zoom, Task.is_square,
                         Task.task_status) \
            .filter(Task.project_id == project_id).all()

    tasks_features = []
    for task in project_tasks:
        task_properties = dict(taskId=task.id, taskX=task.x, taskY=task.y, taskZoom=task.zoom,
                               taskIsSquare=task.is_square,
                               taskStatus=TaskStatus(task.task_status).name)

        feature = geojson.Feature(properties=task_properties)
        tasks_features.append(feature)

    return geojson.FeatureCollection(tasks_features)