def clean_geo_data(self, s: str, to="Point", flip=True):
    """Extract coordinates from a CSV geometry string and build a geojson object.

    :param s: string to clean; expected to describe a Point or MultiLineString
              as exported to csv
    :param to: geojson type to build, 'Point' or 'MultiLineString'
    :param flip: if True, flip lon/lat pairs to lat/lon via self.flip_coords
    :return: geojson Point or MultiLineString, or -1 for an unrecognized `to`
    """
    # NOTE(review): this character class also matches literal '?'; kept as-is
    # to preserve the original tokenizing behavior — confirm input never
    # contains '?' characters.
    number_re = r'[\-?\d\.?]+'
    if to == 'Point':
        cleaned = list(map(float, re.findall(number_re, s)))
        if flip:
            cleaned = self.flip_coords(cleaned)
            # flip_coords returns a list of pairs; unwrap the single point
            cleaned = [cleaned[0][0], cleaned[0][1]]
        return Point(cleaned)
    elif to == 'MultiLineString':
        cleaned = []
        # each parenthesized group is one line of the multilinestring
        for line in re.findall(r'\((.*?)\)', s):
            this_line = list(map(float, re.findall(number_re, line)))
            if flip:
                cleaned.append(self.flip_coords(this_line))
            else:
                # BUG FIX: original appended `self.this_line`, which raises
                # AttributeError — append the local parsed line instead.
                cleaned.append(this_line)
        return MultiLineString(cleaned)
    else:
        return -1
def multilinestring(self, count_limit=100, node_limit=1000):
    """Return a geojson MultiLineString with a random number of segments,
    each segment holding a random number of nodes.

    :param count_limit: maximum number of line segments (at least 1)
    :param node_limit: maximum number of nodes per segment (at least 2)
    """
    # FIX: the original wrapped each segment in `[...][0]`, a no-op that only
    # obscured the expression; build the segment list directly.
    return MultiLineString([
        self.lnglat(num=randint(2, node_limit))
        for _ in range(randint(1, count_limit))
    ])
def getgeojson(gedges, seeds):
    """Write the graph edges as a geojson MultiLineString to 'map0.geojson'.

    :param gedges: iterable of (i, j) index pairs into `seeds`
    :param seeds: sequence of (x, y) coordinate pairs
    """
    segments = []
    for edge in gedges:
        p1 = seeds[edge[0]]
        p2 = seeds[edge[1]]
        segments.append([(p1[0], p1[1]), (p2[0], p2[1])])
    with open('map0.geojson', 'w') as fdist:
        # BUG FIX: file.write() requires a str; the original passed the
        # MultiLineString object itself (TypeError). Serialize it first, the
        # same way the sibling script does with str(...).
        fdist.write(str(MultiLineString(segments)))
def dump_flight_to_geojson(flight, geojson_filename_local):
    """Dumps the flight to geojson format.

    Writes a FeatureCollection containing takeoff/landing points, thermal
    enter/exit points (as a MultiPoint) and the full track (as a
    MultiLineString) to `geojson_filename_local`.

    :param flight: parsed flight object with takeoff_fix, landing_fix,
        thermals and fixes attributes (each fix exposing .lat/.lon)
    :param geojson_filename_local: path of the geojson file to write
    :return: bounding box as [[min_lat, min_lon], [max_lat, max_lon]]
    """
    from geojson import Point, Feature, FeatureCollection, MultiPoint, MultiLineString, dump
    # NOTE(review): assert is stripped under `python -O`; an explicit raise
    # would be safer if invalid flights can reach this function — confirm.
    assert flight.valid
    #TODO write objects to the geojson form the flight object
    # seed the bbox with the takeoff fix; checkbbox() grows it per point
    min_lat = flight.takeoff_fix.lat
    min_lon = flight.takeoff_fix.lon
    max_lat = flight.takeoff_fix.lat
    max_lon = flight.takeoff_fix.lon
    bbox = [[min_lat, min_lon], [max_lat, max_lon]]
    features = []
    #features.append(Feature(geometry=point, properties={"country": "Spain"}))
    # geojson uses (lon, lat) ordering for coordinates
    takeoff = Point((flight.takeoff_fix.lon, flight.takeoff_fix.lat))
    features.append(
        Feature(geometry=takeoff, properties={"TakeOff": "TakeOff"}))
    bbox = checkbbox(flight.takeoff_fix.lat, flight.takeoff_fix.lon, bbox)
    landing = Point((flight.landing_fix.lon, flight.landing_fix.lat))
    features.append(
        Feature(geometry=landing, properties={"Landing": "Landing"}))
    bbox = checkbbox(flight.landing_fix.lat, flight.landing_fix.lon, bbox)
    # collect both the entry and exit fix of every thermal
    thermals = []
    for i, thermal in enumerate(flight.thermals):
        # add_point(name="thermal_%02d" % i, fix=thermal.enter_fix)
        thermals.append((thermal.enter_fix.lon, thermal.enter_fix.lat))
        # add_point(name="thermal_%02d_END" % i, fix=thermal.exit_fix)
        thermals.append((thermal.exit_fix.lon, thermal.exit_fix.lat))
        bbox = checkbbox(thermal.enter_fix.lat, thermal.enter_fix.lon, bbox)
        bbox = checkbbox(thermal.exit_fix.lat, thermal.exit_fix.lon, bbox)
    thermals_multipoint = MultiPoint(thermals)
    features.append(Feature(geometry=thermals_multipoint))
    # the whole tracklog as a single line
    route = []
    for fix in flight.fixes:
        route.append((fix.lon, fix.lat))
        bbox = checkbbox(fix.lat, fix.lon, bbox)
    route_multilinestring = MultiLineString([route])
    features.append(
        Feature(geometry=route_multilinestring, properties={"Track": "Track"}))
    # add more features...
    # features.append(...)
    feature_collection = FeatureCollection(features)
    with open(geojson_filename_local, 'w') as f:
        dump(feature_collection, f)
    return bbox
def get_geo_json(coordinates):
    """Wrap a coordinate list in a geojson MultiLineString.

    :param coordinates: coordinate list as input
    :return: geojson MultiLineString built from `coordinates`
    """
    return MultiLineString(coordinates)
def get_geojson(self):
    """Parse the GPX file at `self.path` into a GeoJSON MultiLineString,
    attaching map-tile identifiers at zoom levels 5, 10 and 14."""
    with open(self.path) as gpx_file:
        gpx = GPXParser(gpx_file).parse()
    map_tiles = []
    for zoom in (5, 10, 14):
        map_tiles.extend(str(t) for t in gpx.get_map_tiles(zoom=zoom, expand=1))
    return MultiLineString(gpx.get_polylines(), tiles=map_tiles)
def getgeojson(gedges, seeds):
    """Write the graph edges as a geojson MultiLineString to 'map0.geojson'.

    :param gedges: iterable of (i, j) index pairs into `seeds`
    :param seeds: sequence of (x, y) coordinate pairs
    """
    inp = []
    for xx in gedges:
        ll1 = seeds[xx[0]]
        ll2 = seeds[xx[1]]
        inp.append([(ll1[0], ll1[1]), (ll2[0], ll2[1])])
    # FIX: the original opened the file without ever closing it; a context
    # manager guarantees flush/close even if serialization raises.
    with open('map0.geojson', 'w') as fdist:
        print(MultiLineString(inp), end='', file=fdist)
def test_bbox_multi_line_string():
    """bbox() of a MultiLineString spans the extremes of all member lines."""
    first_line = [(3.75, 9.25), (-130.95, 1.52)]
    second_line = [(23.15, -34.25), (-1.35, -4.65), (3.45, 77.95)]
    result = bbox(MultiLineString((first_line, second_line)))
    # west, south, east, north
    assert result[0] == -130.95
    assert result[1] == -34.25
    assert result[2] == 23.15
    assert result[3] == 77.95
def rows_to_geojson(file_path, rows, crs):
    """Converts a list of rows to a geojson file of MultiLineString shapes."""
    # one single-line MultiLineString Feature per row
    features = [Feature(geometry=MultiLineString([row])) for row in rows]
    collection = FeatureCollection(features, crs=crs)
    with open(file_path, 'w') as out_file:
        geojson.dump(collection, out_file)
def to_geojson(self):
    """Return this trip as a geojson Feature: the positions as a
    MultiLineString plus trip summary statistics as properties."""
    geometry = MultiLineString(tuple(list(pos) for pos in self.positions))
    properties = {
        "start_at": self.start_at,
        "end_at": self.end_at,
        "average speed": self.speed_average,
        "average consumption": self.consumption_km,
        "average consumption fuel": self.consumption_fuel_km,
    }
    return Feature(geometry=geometry, properties=properties)
def _getStreets(regions: FeatureCollection) -> FeatureCollection:
    """Collect a set of Ways (from OSM) and convert them to a MultiLineString.

    Queries the Overpass API for streets inside `regions`, groups the returned
    ways by street name, merges contiguous ways, and returns one Feature per
    street (a MultiLineString of its segments) inside a FeatureCollection.
    Concurrent calls are throttled against OSMMiner._rateLimit.

    NOTE(review): the original annotation said -> MultiLineString, but the
    function returns a FeatureCollection; annotation corrected.
    """
    overpassQueryUrl = OSMMiner._createCollectStreetsQuery(regions)
    # serialize the check-and-increment of the shared query counter
    OSMMiner._lock.acquire()
    #write_to_log("Rate limit %d, current queries: %d \n" % (OSMMiner._rateLimit, OSMMiner._currentQueries))
    write_to_log(f'OSMMiner._OSMServerURL: {OSMMiner._OSMServerURL}')
    #write_to_log(f'overpassQueryUrl: {overpassQueryUrl}')
    # busy-wait until a request slot frees up under the rate limit
    while OSMMiner._currentQueries >= OSMMiner._rateLimit:
        time.sleep(1)
        OSMMiner._waitForAvailableSlots()
    OSMMiner._currentQueries += 1
    ##DEBUG
    #print("added query: %d\n" % OSMMiner._currentQueries)
    OSMMiner._lock.release()
    # NOTE(review): the decrement below is done outside the lock and is not
    # in a try/finally — a failed request leaks a slot; confirm acceptable.
    jsonString = requests.get(overpassQueryUrl).content
    OSMMiner._currentQueries -= 1
    ##DEBUG
    #print("removed query: %d\n" % OSMMiner._currentQueries)
    try:
        #TODO: Treat cases in which the OSM server fails
        osmResult = OSMResult.fromJsonString(jsonString)
    except:
        write_to_log("Error while parsing overpass message. Message sample: %s" % jsonString[:100])
        raise AttributeError("Invalid jsonString")
    streetSegments = {}
    # Data needs to be sorted before being grouped, otherwise
    # the same group may appear multiple times
    data = sorted(osmResult.Ways.values(), key=lambda x: x.tags.get('name'))
    g = groupby(data, lambda x: x.tags.get('name'))
    for streetName, group in g:
        nodesList = [x.nodes for x in group]
        # join ways that share endpoints (mutates nodesList in place)
        OSMMiner._mergeWays(nodesList)
        if streetName in streetSegments:
            streetSegments[streetName] = streetSegments[streetName] + nodesList
        else:
            streetSegments[streetName] = nodesList
    featuresList = []
    for streetName in streetSegments:
        # one Feature per street: each merged way becomes one LineString
        featuresList.append(
            Feature(id=streetName, properties={'name': streetName},
                    geometry=MultiLineString([LineString([
                        Point([osmResult.Nodes[n].lon, osmResult.Nodes[n].lat])
                        for n in s]) for s in streetSegments[streetName]]))
        )
    return FeatureCollection(featuresList, crs=OSMMiner._crs)
def test_dumps_multiline3D(self):
    """esri_dumps renders a 3D MultiLineString as ESRI 'paths' with hasZ."""
    from chsdi.esrigeojsonencoder import dumps as esri_dumps
    from geojson import MultiLineString
    # two 2-point lines, each carrying a constant Z value
    mls = MultiLineString(
        [[(600000, 200000, 555), (650000, 250000, 555)],
         [(700000, 230000, 666), (800000, 340000, 666)]],
        properties={'name': 'toto'})
    encoded = esri_dumps(mls)
    self.assertEqual(
        encoded,
        '{"paths": [[[600000, 200000, 555], [650000, 250000, 555]], [[700000, 230000, 666], [800000, 340000, 666]]], "spatialReference": {"wkid": 21781}, "hasZ": true, "attributes": {"name": "toto"}}'
    )
def generate_grid(self, cells):
    """Build a geojson Feature holding a `cells` x `cells` grid of lines
    spanning this object's bounding box, styled as a faint overlay."""
    lines = []
    # vertical lines (columns)
    for x in linspace(self.min_x, self.max_x, num=cells):
        lines.append([(x, self.min_y), (x, self.max_y)])
    # horizontal lines (rows)
    for y in linspace(self.min_y, self.max_y, num=cells):
        lines.append([(self.min_x, y), (self.max_x, y)])
    style = {'type': 'grid', 'color': "#999", 'opacity': 0.3, 'weight': 2}
    return Feature(geometry=MultiLineString(lines), properties=style)
def writeToGeojson(self, filepath):
    """Serialize the graph's edges (with their weights) to a geojson file.

    Each edge (u, v) becomes a single-line MultiLineString Feature with a
    'weight' property taken from the edge data.

    :param filepath: destination path for the geojson file
    """
    features = []
    for u, v, d in self.Graph.edges(data=True):
        f = Feature(geometry=MultiLineString([[u, v]]),
                    properties={"weight": d['weight']})
        features.append(f)
    routes = FeatureCollection(features)
    # FIX: open/write/close without try/finally leaked the handle if dumps()
    # raised; a context manager guarantees the file is closed.
    with open(filepath, "w") as fd:
        fd.write(dumps(routes, sort_keys=True))
def convert_to_geojson(gpx):
    """Convert a parsed GPX object into a geojson FeatureCollection: one
    MultiLineString Feature per track, with (lon, lat, elevation) points."""
    features = []
    for track in gpx.tracks:
        lines = [
            [(p.longitude, p.latitude, p.elevation) for p in segment.points]
            for segment in track.segments
        ]
        track_props = {
            'name': track.name,
            'description': track.description,
            'comment': track.comment,
        }
        features.append(
            Feature(geometry=MultiLineString(lines), properties=track_props))
    return FeatureCollection(features)
def generate_timed_geojson_for_line(self, coordinates, time, load, allocationEnergy, id, es_id, carrier_id, min_en, max_en):
    """Build a geojson MultiLineString Feature for one network line at one
    time step, styled by energy-allocation direction and magnitude.

    :param coordinates: multilinestring coordinates of the line
    :param time: time step stored on the feature
    :param load: NOTE(review): never used in the body — 'load' is instead
        filled from allocationEnergy below; confirm this is intended
    :param allocationEnergy: signed allocated energy for this line/time
    :param id: feature id
    :param es_id: energy-system id; concatenated with carrier_id to look up
        the stroke color (assumes string keys — TODO confirm)
    :param carrier_id: carrier id (see es_id)
    :param min_en: minimum (most negative) energy, used to scale reverse flow
    :param max_en: maximum energy, used to scale forward flow
    :return: the styled geojson Feature
    """
    my_feature = Feature(geometry=MultiLineString(coordinates))
    my_feature['properties']['id'] = id
    my_feature['properties']['time'] = time
    # NOTE(review): 'load' property is set from allocationEnergy, not from
    # the `load` parameter — confirm against callers.
    my_feature['properties']['load'] = allocationEnergy
    my_feature['properties']['stroke'] = self.colors[es_id+carrier_id]["color"]
    if allocationEnergy < 0:
        # reverse flow: width scaled against the most negative energy
        val = 10*fabs(allocationEnergy/min_en) + 3
        my_feature['properties']['pos'] = False
    else:
        # forward flow: width scaled against the maximum energy
        val = 10*allocationEnergy/max_en + 3
        my_feature['properties']['pos'] = True
    # val = 10*((fabs(allocationEnergy) - min_en)/(max_en - min_en)) + 3
    my_feature['properties']['strokeWidth'] = val
    return my_feature
def line_offset(geojson: Feature, distance: float, unit: str = "km") -> Feature:
    """
    Takes a linestring or multilinestring and returns a line at offset by the specified distance.

    :param geojson: input GeoJSON
    :param distance: distance to offset the line (can be of negative value)
    :param unit: Units in which distance to be calculated, values can be 'deg', 'rad',
        'mi', 'km', default is 'km'
    :return: Line feature offset from the input line

    Example:

    >>> from geojson import MultiLineString, Feature
    >>> from turfpy.transformation import line_offset
    >>> ls = Feature(geometry=MultiLineString([
    ... [(3.75, 9.25), (-130.95, 1.52)],
    ... [(23.15, -34.25), (-1.35, -4.65), (3.45, 77.95)]
    ... ]))
    >>> line_offset(ls, 2, unit='mi')
    """
    if not geojson:
        raise Exception("geojson is required")
    if not distance:
        raise Exception("distance is required")

    geom_type = get_type(geojson)
    properties = geojson.get("properties", {})

    if geom_type == "LineString":
        return line_offset_feature(geojson, distance, unit)
    if geom_type == "MultiLineString":
        offset_lines = []

        def collect_offset(feature, feature_index, multi_feature_index):
            # offset each member line independently and keep its coordinates
            offset_lines.append(
                line_offset_feature(feature, distance, unit).geometry.coordinates
            )
            return True

        flatten_each(geojson, collect_offset)
        return Feature(geometry=MultiLineString(offset_lines), properties=properties)
def all_roads():
    """Render every stored road geometry as one geojson FeatureCollection.

    Each row's coordinate payload is a ';'-separated list of 'lat,lon' pairs;
    all rows are combined into a single MultiLineString Feature.
    (The database fetch is currently commented out, so `rows` is empty.)
    """
    rows = []
    #session_all.set_keyspace("road_geoloc")
    #rows = session_all.execute("SELECT * FROM header")
    array = []
    for row in rows:
        coords = row[1].split('\'')[0]
        # FIX: `len(coords) is not 0` compared ints by identity, which is an
        # implementation detail of CPython small-int caching; test emptiness
        # directly instead.
        if coords:
            coord = []
            for entry in coords.split(';'):
                lat, lon = entry.split(',')
                coord.append((float(lat), float(lon)))
            array.append(coord)
    formated = MultiLineString(array)
    my_feature = Feature(geometry=formated)
    coll = FeatureCollection([my_feature])
    print("Show all roads")
    return jsonify(coll)
def feature_collection_from_streets(values, street_tree, collection_title):
    """
    Transformation of information about streets into geojson FeatureCollection.

    Parameters
    ----------
    values : list of lists
        List of information about one street of each region in following format:
        [measured value, code of street, name of street, name of town,
        name of region]
    street_tree : xml.etree.cElementTree
        cElementTree of xml database about country addresses
    collection_title : string
        What name the resulting FeatureCollection should have.

    Returns
    -------
    geojson.FeatureCollection
        FeatureCollection containing MultiLines made from information about
        streets from `values`.
    """
    streets_collection = []
    for region_street in values:
        # look up the street's geometry by its code attribute
        street_positions = street_tree.getroot().findall(
            ".//Ulice[@kod='" + region_street[1] + "']/Geometrie/PosList")
        lines = parse_street_lines(street_positions)
        mls = MultiLineString(lines)
        length = multi_segment_length(lines)
        street_feature = Feature(geometry=mls,
                                 properties={
                                     'name': region_street[Street.street_name],
                                     'town': region_street[Street.town_name],
                                     'region': region_street[Street.region_name],
                                     'length': length,
                                     'measured': region_street[Street.measured]
                                 },
                                 id=int(region_street[Street.code]))
        streets_collection.append(street_feature)
    # NOTE(review): the standard geojson.FeatureCollection takes the feature
    # list as its first argument; passing the title first suggests a
    # project-local FeatureCollection wrapper — confirm.
    return FeatureCollection(collection_title, streets_collection)
def run(self):
    """
    Finds all streets with names related to death and converts information
    about them to geojson FeatureCollection.

    It goes through the xml tree and looks for streets whose name contains
    words like 'Fallen' or 'Graveyard'. The locations of these streets are
    converted into geojson FeatureCollection.

    Returns
    -------
    geojson.FeatureCollection
        FeatureCollection containing Lines of morbid streets
    """
    streets = []
    # map town code -> town name for property lookup below
    town_names = {
        obec.get("kod"): obec.find("Nazev").text
        for obec in self.db_tree.iter('Obec')
    }
    for street in self.db_tree.findall(".//Ulice"):
        street_name = street.find('Nazev').text
        # keep only streets whose name matches the profanity/morbidity regex
        if self.__profanity.match(street_name):
            street_id = int(street.get('kod'))
            town_id = street.get('obec')
            segments = parse_street_lines(
                street.findall('Geometrie/PosList'))
            mls = MultiLineString(segments)
            street_feature = Feature(geometry=mls,
                                     properties={
                                         'name': street_name,
                                         'town': town_names[town_id],
                                         'style': {
                                             'stroke': 'black'
                                         }
                                     },
                                     id=street_id)
            streets.append(street_feature)
    # NOTE(review): the standard geojson.FeatureCollection takes the feature
    # list as its first argument; passing the title first suggests a
    # project-local FeatureCollection wrapper — confirm.
    return FeatureCollection(self.__title, streets)
def get(self, format):
    # Tornado coroutine handler: compute a viewshed (areas visible from a
    # point at a given altitude) from elevation tiles and return it as a
    # MultiLineString Feature of visible runs.
    # NOTE(review): this block uses the Python 2 `print` statement — it is
    # Python 2 code; left untouched.
    #168036.0, 404958.0
    #(168036.0, 404958.0) (168038.83662185463, 404948.41075725335)
    lng = self.get_argument('lng')
    lat = self.get_argument('lat')
    altitude = self.get_argument('altitude')
    radius = self.get_argument('radius', 1000)
    abs_altitude = self.get_argument('abs_altitude', False)
    # all numeric query args must parse as floats, else 400
    try:
        lng, lat, altitude, radius = map(float, (lng, lat, altitude, radius))
    except Exception:
        raise tornado.web.HTTPError(400)
    radius = CoordSystem.pixel_per_meter((lng, lat))*radius #meters -> pixels
    print 'Getting viewshed at lng: {}, lat: {}, altitude: {}, radius:{}'.format(lng, lat, altitude, radius)
    center = CoordSystem.lnglat_to_pixel((lng, lat))
    sampler = TileSampler(url_template=options.tile_template)
    #add relative altitude offset
    # relative altitude: offset by the terrain elevation at the center point
    if not abs_altitude:
        offset = yield sampler.sample_pixel(center)
    else:
        offset = 0
    line_segments = []
    # cast rays outward; keep the visible runs along each ray
    for start, stop in generate_line_segments(radius, center):
        elevations, pixels = yield sampler.sample_line(start, stop)
        if elevations is None: continue #if no data found skip it
        line_segments.extend(iter_to_runs(generate_visible(altitude+offset, elevations), pixels))
    if len(line_segments) == 0:
        raise tornado.web.HTTPError(404, "No elevation data was found for query")
    # convert pixel coordinates back to lng/lat for the response
    line_segments = [[CoordSystem.pixel_to_lnglat(coord) for coord in segment] for segment in line_segments]
    self.write_api_response(format, Feature(geometry=MultiLineString(line_segments), properties={
        "calculationAltitude":altitude,
        "calculationRaduis":float(self.get_argument('radius', 1000)),
        "calculationLat":lat,
        "calculationLng":lng,
        "uiMapCenter":line_segments[0][0],
        "uiPopupContent":"Viewshed at {} meters above sea level".format(altitude)
    }))
def process_frame_geojson(u, v, long, lat, autoscale=True, scale=0.5):
    """
    Generates arrows in a GeoJSON format from vector field data.

    Arguments:
        u ([time:y:x]): Eastward direction component of vector.
        v ([time:y:x]): Northward direction component of vector.
        long ([int]): Array of longitude values.
        lat ([int]): Array of latitude values.
        autoscale (boolean) (default: True): If True, scale arrows according
            to magnitude. If False, all arrows have the same size.
        scale (float) (default: 0.5): Arrow scale in coordinates relative to
            arrow magnitude if not autoscaled.
    """
    # FIX: dropped the never-used `length = 0` accumulator and build the
    # arrow list with a comprehension (same iteration order: lat outer,
    # long inner). Docstring fix: the 4th parameter is `lat`, not a second
    # `long`.
    arrows = [
        calc_arrow(u[i][j], v[i][j], long[j], lat[i], autoscale, scale)
        for i in range(len(lat))
        for j in range(len(long))
    ]
    return MultiLineString(arrows)
def get_yyc_bounds(self):
    '''Gets max/min lat and lon to plot city boundary.

    Reads the city boundary geometry from a CSV, parses it via
    clean_geo_data, and scans every point for the extremes.

    :return: tuple (ne, sw) where ne = [max_x, max_y] and sw = [min_x, min_y]
    '''
    yyc_map = pd.read_csv('./data/City_Boundary_layer.csv')
    geom = yyc_map.the_geom[0]
    # NOTE(review): clean_geo_data('MultiLineString') already returns a
    # MultiLineString, so this wraps a MultiLineString inside another one —
    # confirm the nested result still exposes usable 'coordinates' below.
    geom = MultiLineString(
        self.clean_geo_data(geom, 'MultiLineString', True))
    # start with inverted extremes so any real point replaces them
    ne = [float('-inf'), float('-inf')]
    sw = [float('+inf'), float('+inf')]
    # find maximum / minimum coords
    for lines in geom['coordinates']:
        for point in lines:
            if point[0] > ne[0]: ne[0] = point[0]
            if point[1] > ne[1]: ne[1] = point[1]
            if point[0] < sw[0]: sw[0] = point[0]
            if point[1] < sw[1]: sw[1] = point[1]
    return (ne, sw)
def _split_by_date_line(self, line):
    # Split an input linestring in EPSG:4326 against the -180/180 date line.
    # Walks consecutive (x, y) pairs; whenever the per-point shift returned
    # by self._get_shift changes sign/value, the line is cut: the current
    # sub-line is closed at the date line (interpolating y at the crossing)
    # and a new sub-line is started on the other side. Returns a geojson
    # MultiLineString of the resulting sub-lines.
    xp, yp = line[0]
    sn = self._get_shift(xp)
    sp = sn  # shift of the previous point; a change marks a crossing
    multilines = [[self._xy2lonlat(xp + sn, yp)]]
    m = 0  # index of the sub-line currently being appended to
    for i in range(1, len(line)):
        xn, yn = line[i]
        sn = self._get_shift(xn)
        if sn != sp:
            # date-line crossing between the previous and current point
            m += 1
            multilines.append([])
            # y at the exact crossing, linearly interpolated
            yi = self._get_interpolated_y(xp, yp, xn, yn)
            if xn > xp:
                # moving east: close previous line at the west edge (self.W),
                # start the new line at x = 0
                multilines[m - 1].append(self._xy2lonlat(self.W, yi))
                multilines[m].append(self._xy2lonlat(0, yi))
            else:
                # moving west: mirror of the above
                multilines[m - 1].append(self._xy2lonlat(0, yi))
                multilines[m].append(self._xy2lonlat(self.W, yi))
            sp = sn
        xp, yp = xn, yn
        multilines[m].append(self._xy2lonlat(xn + sn, yn))
    return MultiLineString(multilines)
def add_geojson(self, json_ld):
    """Adds geospatial and event data that links time and space information.

    Builds geojson Features for every geo record in self.geo_meta (points,
    security-reduced regions, or full polygon/multiline geometries), attaches
    chronological "when" data from self.event_meta to the matching features
    (duplicating a feature when it has several time spans), and merges the
    result into `json_ld` — directly for a single feature, or as a
    FeatureCollection for several.

    :param json_ld: the JSON-LD dict being assembled for this item
    :return: `json_ld` with geospatial/event data merged in
    """
    uuid = self.manifest.uuid
    item_type = self.manifest.item_type
    geo_meta = self.geo_meta
    event_meta = self.event_meta
    features_dict = False  # dict of all features to be added
    feature_events = False  # mappings between features and time periods
    if geo_meta is not False:
        # print('here!' + str(geo_meta))
        features_dict = LastUpdatedOrderedDict()
        feature_events = LastUpdatedOrderedDict()
        for geo in geo_meta:
            geo_id = geo.feature_id
            geo_node = '#geo-' + str(geo_id)  # the node id for database rec of the feature
            geo_node_geom = '#geo-geom-' + str(geo_id)
            geo_node_props = '#geo-props-' + str(geo_id)
            geo_node_derived = '#geo-derived-' + str(geo_id)  # node id for a derived feature
            geo_node_derived_geom = '#geo-derived-geom-' + str(geo_id)
            geo_node_derived_props = '#geo-derived-props-' + str(geo_id)
            feature_events[geo_node] = []
            geo_props = LastUpdatedOrderedDict()
            geo_props['href'] = URImanagement.make_oc_uri(
                uuid, item_type, self.cannonical_uris)
            geo_props['type'] = geo.meta_type
            if len(geo.note) > 0:
                geo_props['note'] = geo.note
            if uuid != geo.uuid:
                # location inherited from a containing item
                geo_props['reference-type'] = 'inferred'
                geo_props['reference-uri'] = URImanagement.make_oc_uri(
                    geo.uuid, 'subjects', self.cannonical_uris)
                rel_meta = self.item_gen_cache.get_entity(geo.uuid)
                if rel_meta is not False:
                    geo_props['reference-label'] = rel_meta.label
                    geo_props['reference-slug'] = rel_meta.slug
            else:
                geo_props['reference-label'] = self.manifest.label
                geo_props['reference-type'] = 'specified'
                if self.assertion_hashes:
                    geo_props['hash_id'] = geo.hash_id
                    geo_props['feature_id'] = geo.feature_id
            if geo.specificity < 0 and self.manifest.item_type != 'projects':
                # case where we've got reduced precision geospatial data
                # geotile = quadtree.encode(geo.latitude, geo.longitude, abs(geo.specificity))
                geo_props['location-precision'] = abs(geo.specificity)
                geo_props['location-precision-note'] = \
                    'Location data approximated as a security precaution.'
                gmt = GlobalMercator()
                geotile = gmt.lat_lon_to_quadtree(geo.latitude,
                                                  geo.longitude,
                                                  abs(geo.specificity))
                tile_bounds = gmt.quadtree_to_lat_lon(geotile)
                # tile corners as a closed polygon ring (lon, lat order)
                item_polygon = Polygon([[(tile_bounds[1], tile_bounds[0]),
                                         (tile_bounds[1], tile_bounds[2]),
                                         (tile_bounds[3], tile_bounds[2]),
                                         (tile_bounds[3], tile_bounds[0]),
                                         (tile_bounds[1], tile_bounds[0])]])
                item_f_poly = Feature(geometry=item_polygon)
                item_f_poly.id = geo_node_derived
                item_f_poly.geometry.id = geo_node_derived_geom
                item_f_poly.properties.update(geo_props)
                item_f_poly.properties['location-note'] = 'This region defines the '\
                    'approximate location for this item.'
                item_f_poly.properties['id'] = geo_node_derived_props
                features_dict[geo_node_derived] = item_f_poly
                item_point = Point(
                    (float(geo.longitude), float(geo.latitude)))
                item_f_point = Feature(geometry=item_point)
                item_f_point.id = geo_node
                item_f_point.geometry.id = geo_node_geom
                item_f_point.properties.update(geo_props)
                item_f_point.properties['location-note'] = 'This point defines the center of the '\
                    'region approximating the location for this item.'
                item_f_point.properties['id'] = geo_node_props
                features_dict[geo_node] = item_f_point
            elif len(geo.coordinates) > 1:
                # here we have geo_json expressed features and geometries to use
                if geo.specificity < 0:
                    geo_props['location-precision-note'] = \
                        'Location data approximated as a security precaution.'
                elif geo.specificity > 0:
                    geo_props['location-precision-note'] = \
                        'Location data has uncertainty.'
                else:
                    geo_props['location-precision-note'] = 'Location data available with no '\
                        'intentional reduction in precision.'
                item_point = Point(
                    (float(geo.longitude), float(geo.latitude)))
                item_f_point = Feature(geometry=item_point)
                item_f_point.properties.update(geo_props)
                if uuid == geo.uuid:
                    # the item itself has the polygon as it's feature
                    item_db = Point(
                        (float(geo.longitude), float(geo.latitude)))
                    if geo.ftype == 'Polygon':
                        coord_obj = json.loads(geo.coordinates)
                        item_db = Polygon(coord_obj)
                    elif (geo.ftype == 'MultiPolygon'):
                        coord_obj = json.loads(geo.coordinates)
                        item_db = MultiPolygon(coord_obj)
                    elif (geo.ftype == 'MultiLineString'):
                        coord_obj = json.loads(geo.coordinates)
                        item_db = MultiLineString(coord_obj)
                    item_f_db = Feature(geometry=item_db)
                    item_f_db.id = geo_node
                    item_f_db.geometry.id = geo_node_geom
                    item_f_db.properties.update(geo_props)
                    item_f_db.properties['id'] = geo_node_props
                    features_dict[geo_node] = item_f_db
                    item_f_point.id = geo_node_derived
                    item_f_point.geometry.id = geo_node_derived_geom
                    item_f_point.properties['location-region-note'] = 'This point represents the center of the '\
                        'region defining the location of this item.'
                    item_f_point.properties['id'] = geo_node_derived_props
                    features_dict[geo_node_derived] = item_f_point
                else:
                    # the item is contained within another item with a polygon or multipolygon feature
                    item_f_point.id = geo_node
                    item_f_point.geometry.id = geo_node_geom
                    item_f_point.properties['id'] = geo_node_props
                    item_f_point.properties['contained-in-region'] = True
                    item_f_point.properties['location-region-note'] = 'This point represents the center of the '\
                        'region containing this item.'
                    features_dict[geo_node] = item_f_point
            else:
                # case where the item only has a point for geo-spatial reference
                geo_props['location-note'] = \
                    'Location data available with no intentional reduction in precision.'
                item_point = Point(
                    (float(geo.longitude), float(geo.latitude)))
                item_f_point = Feature(geometry=item_point)
                item_f_point.id = geo_node
                item_f_point.geometry.id = geo_node_geom
                item_f_point.properties.update(geo_props)
                item_f_point.properties['id'] = geo_node_props
                features_dict[geo_node] = item_f_point
    if event_meta is not False:
        # events provide chrological information, tied to geo features
        # sometimes there are more than 1 time period for each geo feature
        # in such cases, we duplicate geo features and add the different time event
        # information to the new features
        for event in event_meta:
            rel_feature_num = 1  # default to the first geospatial feature for where the event happened
            rel_feature_node = False
            if event.feature_id > 0:
                rel_feature_num = event.feature_id
            if rel_feature_num >= 1:
                rel_feature_node = '#geo-' + str(rel_feature_num)
            act_event_obj = LastUpdatedOrderedDict()
            act_event_obj = self.add_when_json(act_event_obj, uuid,
                                               item_type, event)
            if rel_feature_node is not False and feature_events is not False:
                feature_events[rel_feature_node].append(act_event_obj)
    if features_dict is not False:
        if feature_events is not False:
            for node_key, event_list in feature_events.items():
                # update the feature with the first event "when" information
                if len(event_list) > 0:
                    features_dict[node_key].update(event_list[0])
                    event_i = 1
                    for event in event_list:
                        if event_i <= 1:
                            # add the time info to the feature
                            old_feature = features_dict[node_key]
                            old_geo_id = old_feature.geometry['id']
                            old_prop_id = old_feature.properties['id']
                            features_dict[node_key].update(event)
                        else:
                            act_feature = copy.deepcopy(old_feature)
                            # now add new node ids for the new features created to for the event
                            new_node = node_key + '-event-' + str(event_i)
                            act_feature.id = new_node
                            act_feature.geometry['id'] = \
                                old_geo_id + '-event-' + str(event_i)
                            act_feature.properties['id'] = \
                                old_prop_id + '-event-' + str(event_i)
                            act_feature.update(event)  # add the time info to the new feature
                            features_dict[new_node] = act_feature
                            del(act_feature)
                        event_i += 1
        feature_keys = list(features_dict.keys())
        # BUG FIX: the original tested `len(feature_keys) < 1` and then
        # indexed feature_keys[0], which raises IndexError on an empty dict
        # and wrongly sent single-feature items down the collection branch.
        # The comment ("only 1 feature") shows the intent is `< 2`.
        if len(feature_keys) < 2:
            del features_dict[feature_keys[0]]['id']  # remove the conflicting id
            # only 1 feature, so item is not a feature collection
            json_ld.update(features_dict[feature_keys[0]])
        else:
            feature_list = []  # multiple features, so item has a feature collection
            for node_key, feature in features_dict.items():
                feature_list.append(feature)
            item_fc = FeatureCollection(feature_list)
            json_ld.update(item_fc)
    return json_ld
def visualisation(permutation, iti_matrix, mydf, j_piece):
    """Render the visiting order on a folium map, persist the rendered HTML
    to the map database, and return a JSON itinerary summary.

    :param permutation: ordered indices of places to visit
    :param iti_matrix: matrix of OTP-style itineraries between place pairs
    :param mydf: dataframe with latitude/longitude/poi_name/time/description
    :param j_piece: pre-built 'basics' payload embedded in the returned JSON
    :return: JSON string with 'basics' and per-stop 'itinerary' entries
    """
    j_list = []
    # draw folium graph
    str_fea_list = []
    tooltip = 'Click For More Info'
    des_dict = {
        'WALK': 'Walk to ',
        'SUBWAY': 'Take subway to ',
        'BUS': 'Take bus to '
    }
    m = folium.Map(location=[1.2791, 103.8154], zoom_start=12)
    for i in range(len(permutation) - 1):
        # for one itinerary
        sta_plc_idx = permutation[i]
        end_plc_idx = permutation[i + 1]
        itinerary = iti_matrix[sta_plc_idx][end_plc_idx]
        true_sta_pt = np.array((mydf._get_value(sta_plc_idx, 'latitude'),
                                mydf._get_value(sta_plc_idx, 'longitude')))
        true_end_pt = np.array((mydf._get_value(end_plc_idx, 'latitude'),
                                mydf._get_value(end_plc_idx, 'longitude')))
        temp_num_legs = len(itinerary['legs'])  # num of legs
        pt_lat = []
        pt_lon = []
        tpl_list = []
        pt_name = []
        mode_list = []
        dist_list = []
        for k in range(temp_num_legs):
            # for each leg: collect start point, destination name, mode, distance
            pt_lon.append(itinerary['legs'][k]['from']['lon'])
            pt_lat.append(itinerary['legs'][k]['from']['lat'])
            tpl_list.append((itinerary['legs'][k]['from']['lon'],
                             itinerary['legs'][k]['from']['lat']))
            pt_name.append(itinerary['legs'][k]['to']['name'])
            mode_list.append(des_dict[itinerary['legs'][k]['mode']])
            dist_list.append(
                str(round(float(itinerary['legs'][k]['distance']) / 1000, 2))
                + ' km.')
            if k == temp_num_legs - 1:
                # close the polyline with the final leg's destination
                pt_lon.append(itinerary['legs'][k]['to']['lon'])
                pt_lat.append(itinerary['legs'][k]['to']['lat'])
                tpl_list.append((itinerary['legs'][k]['to']['lon'],
                                 itinerary['legs'][k]['to']['lat']))
        temp_feature = Feature(geometry=MultiLineString([tpl_list]),
                               properties={'stroke': '#AF4646'})
        str_fea_list.append(temp_feature)
        # the itinerary may be stored reversed; orient it by checking which
        # true endpoint the first point is closest to
        first_point = np.array((pt_lat[0], pt_lon[0]))
        distance1 = np.linalg.norm(first_point - true_sta_pt)
        distance2 = np.linalg.norm(first_point - true_end_pt)
        start_point = [pt_lat[0], pt_lon[0]]
        end_point = [pt_lat[-1], pt_lon[-1]]
        iterator = range(len(mode_list))  # only affect formatting the text
        string = ''
        if distance1 > distance2:
            iterator = range(len(mode_list) - 1, -1, -1)
            start_point = [pt_lat[-1], pt_lon[-1]]
            end_point = [pt_lat[0], pt_lon[0]]
        counter = 0
        for j in iterator:
            string += str(counter+1)+'. ' + \
                mode_list[j]+pt_name[j] + \
                '. Estimated distance is '+dist_list[j]+'\n'
            counter += 1
        folium.Marker(
            start_point,
            popup='<strong>' + string + '</strong>',
            tooltip=tooltip,
            icon=folium.Icon(icon='trophy' if i != 0 else 'flag')).add_to(m),
        folium.Marker(end_point,
                      icon=folium.Icon(icon='trophy' if i !=
                                       len(permutation) - 2 else 'star')).add_to(m)
        temp_j_ele = {}
        temp_j_ele['order'] = i + 1
        temp_j_ele['poi_name'] = mydf._get_value(end_plc_idx, 'poi_name')
        temp_j_ele['time_to_spend'] = mydf._get_value(end_plc_idx, 'time')
        temp_j_ele['time_to_travelhere'] = str(
            timedelta(seconds=int(itinerary['duration'])))
        temp_j_ele['description'] = mydf._get_value(end_plc_idx, 'description')
        j_list.append(temp_j_ele)
    j = {'basics': j_piece, 'itinerary': j_list}
    j_file = json.dumps(j)
    feature_collection = FeatureCollection(str_fea_list)
    ms = geojson.dumps(feature_collection)
    folium.GeoJson(ms, name='multistring').add_to(m)
    # Generate map
    render_m = m.get_root().render()
    # insert value into map_database
    map_engine = map_db_init()
    cursor = map_engine.cursor()
    # SECURITY FIX: the original concatenated render_m into the SQL string;
    # any quote in the rendered HTML breaks the statement and permits SQL
    # injection. Bind it as a parameter instead.
    # (assumes a %s-paramstyle DB-API driver such as psycopg2/PyMySQL —
    # TODO confirm against map_db_init)
    cursor.execute(
        "INSERT INTO map_db (id,map_html) VALUES (default, %s)",
        (render_m,))
    map_engine.commit()
    return j_file
def merge_linestrings(linestrings: Iterable[LineString]) -> MultiLineString:
    """Combine several LineString features into one MultiLineString geometry."""
    return MultiLineString(
        [line["geometry"]["coordinates"] for line in linestrings]
    )
import pandas as pd
from geojson import MultiLineString
import scipy.io

# Zone route solutions exported from MATLAB; each .mat file holds a
# 'route_long_lat' array of (lon, lat) rows, zero-padded at the end.
datafiles = [
    'Zone1_Soln_Route.mat',
    'Zone4_Soln_Route.mat',
    'Zone5_Soln_Route.mat',
]

points_arr = []
for dfile in datafiles:
    df = scipy.io.loadmat(dfile)['route_long_lat']
    # drop padding rows whose first coordinate is exactly 0
    df_clip = pd.DataFrame([df[i] for i in range(len(df)) if df[i][0] != 0])
    points = [(df_clip.iloc[i, 0], df_clip.iloc[i, 1])
              for i in range(df_clip.shape[0])]
    points_arr.append(points)

route_str = MultiLineString(points_arr)
# FIX: bare open/write/close leaked the handle if write() raised; a context
# manager guarantees flush/close on all paths.
with open("routes.json", "w") as f:
    f.write(str(route_str))
def visualisation(permutation, iti_matrix, mydf):
    # Render the visiting order on a folium map and return the rendered HTML.
    # permutation: ordered indices of places to visit
    # iti_matrix: matrix of OTP-style itineraries between place pairs
    # mydf: dataframe with latitude/longitude columns per place
    # draw folium graph
    str_fea_list = []
    tooltip = 'Click For More Info'
    des_dict = {
        'WALK': 'Walk to ',
        'SUBWAY': 'Take subway to ',
        'BUS': 'Take bus to '
    }
    m = folium.Map(location=[1.2791, 103.8154], zoom_start=12)
    for i in range(len(permutation) - 1):
        # for one itinerary
        sta_plc_idx = permutation[i]
        end_plc_idx = permutation[i + 1]
        itinerary = iti_matrix[sta_plc_idx][end_plc_idx]
        true_sta_pt = np.array((mydf._get_value(sta_plc_idx, 'latitude'),
                                mydf._get_value(sta_plc_idx, 'longitude')))
        true_end_pt = np.array((mydf._get_value(end_plc_idx, 'latitude'),
                                mydf._get_value(end_plc_idx, 'longitude')))
        temp_num_legs = len(itinerary['legs'])  # num of legs
        pt_lat = []
        pt_lon = []
        tpl_list = []
        pt_name = []
        mode_list = []
        dist_list = []
        for k in range(temp_num_legs):
            # for each leg: collect start point, destination name, mode, distance
            pt_lon.append(itinerary['legs'][k]['from']['lon'])
            pt_lat.append(itinerary['legs'][k]['from']['lat'])
            tpl_list.append((itinerary['legs'][k]['from']['lon'],
                             itinerary['legs'][k]['from']['lat']))
            pt_name.append(itinerary['legs'][k]['to']['name'])
            mode_list.append(des_dict[itinerary['legs'][k]['mode']])
            dist_list.append(
                str(round(float(itinerary['legs'][k]['distance']) / 1000, 2))
                + ' km.')
            if k == temp_num_legs - 1:
                # close the polyline with the final leg's destination
                pt_lon.append(itinerary['legs'][k]['to']['lon'])
                pt_lat.append(itinerary['legs'][k]['to']['lat'])
                tpl_list.append((itinerary['legs'][k]['to']['lon'],
                                 itinerary['legs'][k]['to']['lat']))
        temp_feature = Feature(geometry=MultiLineString([tpl_list]),
                               properties={'stroke': '#AF4646'})
        str_fea_list.append(temp_feature)
        # the itinerary may be stored reversed; orient it by checking which
        # true endpoint the first point is closest to
        first_point = np.array((pt_lat[0], pt_lon[0]))
        distance1 = np.linalg.norm(first_point - true_sta_pt)
        distance2 = np.linalg.norm(first_point - true_end_pt)
        start_point = [pt_lat[0], pt_lon[0]]
        end_point = [pt_lat[-1], pt_lon[-1]]
        iterator = range(len(mode_list))  # only affect formatting the text
        string = ''
        if distance1 > distance2:
            iterator = range(len(mode_list) - 1, -1, -1)
            start_point = [pt_lat[-1], pt_lon[-1]]
            end_point = [pt_lat[0], pt_lon[0]]
        counter = 0
        for j in iterator:
            string += str(counter + 1) + '. ' + mode_list[j] + pt_name[
                j] + '. Estimated distance is ' + dist_list[j] + '\n'
            counter += 1
        folium.Marker(
            start_point,
            popup='<strong>' + string + '</strong>',
            tooltip=tooltip,
            icon=folium.Icon(icon='trophy' if i != 0 else 'flag')).add_to(m),
        folium.Marker(end_point,
                      icon=folium.Icon(icon='trophy' if i !=
                                       len(permutation) - 2 else 'star')).add_to(m)
    feature_collection = FeatureCollection(str_fea_list)
    ms = geojson.dumps(feature_collection)
    folium.GeoJson(ms, name='multistring').add_to(m)
    # Generate map
    render_m = m.get_root().render()
    return render_m
def result_to_geojson(result, task, flight, second_interval=5):
    """Dumps the flight to geojson format used for mapping.
    Contains tracklog split into pre SSS, pre Goal and post goal parts,
    thermals, takeoff/landing, result object, waypoints achieved, and bounds.

    :param result: task result object (waypoints_achieved, SSS_time,
        goal_time, infringements)
    :param task: task object (start_time, time_offset)
    :param flight: parsed flight with fixes, thermals, takeoff/landing fixes
    :param second_interval: resolution of tracklog. default one point every
        5 seconds. regardless it will keep points where waypoints were
        achieved or an infringement occurred.
    :return: (tracklog FeatureCollection, thermals, takeoff_landing features,
        bbox, waypoints_achieved rows, infringements rows)
    """
    from collections import namedtuple
    from route import rawtime_float_to_hms, distance
    from geojson import Point, Feature, FeatureCollection, MultiLineString

    features = []
    takeoff_landing = []
    thermals = []
    infringements = []
    point = namedtuple('fix', 'lat lon')

    # seed the bounding box with the first fix
    min_lat = flight.fixes[0].lat
    min_lon = flight.fixes[0].lon
    max_lat = flight.fixes[0].lat
    max_lon = flight.fixes[0].lon
    bbox = [[min_lat, min_lon], [max_lat, max_lon]]

    takeoff = Point((flight.takeoff_fix.lon, flight.takeoff_fix.lat))
    takeoff_landing.append(
        Feature(geometry=takeoff, properties={"TakeOff": "TakeOff"}))
    landing = Point((flight.landing_fix.lon, flight.landing_fix.lat))
    takeoff_landing.append(
        Feature(geometry=landing, properties={"Landing": "Landing"}))

    for thermal in flight.thermals:
        thermals.append((
            thermal.enter_fix.lon, thermal.enter_fix.lat,
            f'{thermal.vertical_velocity():.1f}m/s gain:{thermal.alt_change():.0f}m'
        ))

    pre_sss = []
    pre_goal = []
    post_goal = []
    waypoints_achieved = []
    # if the pilot did not make goal, goal time will be None. set to after end of track to avoid issues.
    goal_time = flight.fixes[
        -1].rawtime + 1 if not result.goal_time else result.goal_time
    # if the pilot did not make SSS then it will be 0, set to task start time.
    SSS_time = task.start_time if result.SSS_time == 0 else result.SSS_time

    if len(result.waypoints_achieved) > 0:
        for idx, tp in enumerate(result.waypoints_achieved):
            time = ("%02d:%02d:%02d" %
                    rawtime_float_to_hms(tp.rawtime + task.time_offset))
            achieved = [
                tp.lon, tp.lat, tp.altitude, tp.name, tp.rawtime, time,
                f'<b>{tp.name}</b> <br>'
                f'alt: <b>{tp.altitude:.0f} m.</b><br>'
                f'time: <b>{time}</b>'
            ]
            if idx > 0:
                # leg stats relative to the previously achieved waypoint
                current = point(lon=tp.lon, lat=tp.lat)
                previous = point(lon=waypoints_achieved[-1][0],
                                 lat=waypoints_achieved[-1][1])
                straight_line_dist = distance(previous, current) / 1000
                time_taken = (tp.rawtime - waypoints_achieved[-1][4])
                time_takenHMS = rawtime_float_to_hms(time_taken)
                # Two waypoints can be validated by the same fix (identical
                # rawtime); guard against ZeroDivisionError in that case.
                speed = (straight_line_dist / (time_taken / 3600)
                         if time_taken else 0)
                achieved.append(round(straight_line_dist, 2))
                achieved.append("%02d:%02d:%02d" % time_takenHMS)
                achieved.append(round(speed, 2))
            else:
                achieved.extend([0, "0:00:00", '-'])
            waypoints_achieved.append(achieved)

    lastfix = flight.fixes[0]
    for fix in flight.fixes:
        bbox = checkbbox(fix.lat, fix.lon, bbox)
        keep = False
        if (fix.rawtime >= lastfix.rawtime + second_interval or any(
                tp for tp in result.waypoints_achieved
                if tp.rawtime == fix.rawtime) or any(
                    tp for tp in result.infringements
                    if int(tp['rawtime']) == fix.rawtime)):
            '''keep fixes that validate a turnpoint or cause an infringement'''
            keep = True
            lastfix = fix
        if keep:
            if fix.rawtime <= SSS_time:
                pre_sss.append((fix.lon, fix.lat, fix.gnss_alt, fix.press_alt))
            if SSS_time <= fix.rawtime <= goal_time:
                pre_goal.append(
                    (fix.lon, fix.lat, fix.gnss_alt, fix.press_alt))
                if len(pre_goal) == 1:
                    '''adding fix to pre_sss to link polylines'''
                    pre_sss.append(pre_goal[0])
            if fix.rawtime >= goal_time:
                post_goal.append(
                    (fix.lon, fix.lat, fix.gnss_alt, fix.press_alt))
                if len(post_goal) == 1:
                    '''adding fix to pre_goal to link polylines'''
                    pre_goal.append(post_goal[0])

    route_multilinestring = MultiLineString([pre_sss])
    features.append(
        Feature(geometry=route_multilinestring,
                properties={"Track": "Pre_SSS"}))
    route_multilinestring = MultiLineString([pre_goal])
    features.append(
        Feature(geometry=route_multilinestring,
                properties={"Track": "Pre_Goal"}))
    route_multilinestring = MultiLineString([post_goal])
    features.append(
        Feature(geometry=route_multilinestring,
                properties={"Track": "Post_Goal"}))
    tracklog = FeatureCollection(features)

    '''airspace infringements'''
    if result.infringements:
        for entry in result.infringements:
            time = ("%02d:%02d:%02d" %
                    rawtime_float_to_hms(entry['rawtime'] + task.time_offset))
            infringements.append([
                entry['lon'], entry['lat'],
                int(entry['alt']), entry['space'],
                int(entry['distance']), entry['separation'],
                int(entry['rawtime']), time
            ])
    return tracklog, thermals, takeoff_landing, bbox, waypoints_achieved, infringements