"""Tests for filter.py""" import unittest from geojson import Feature, Polygon, LineString from label_maker.filter import create_filter, _compile, _compile_property_reference, \ _compile_comparison_op, _compile_logical_op, _compile_in_op, _compile_has_op, \ _compile_negation, _stringify line_geometry = LineString([(0, 0), (1, 1)]) polygon_geometry = Polygon([(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)]) class TestCompiledFilters(unittest.TestCase): """Tests for compiled filter functions""" def test_comparison(self): """Test comparison filter function""" ff = create_filter(['==', 'a', 5]) passing = Feature(geometry=line_geometry, properties=dict(a=5)) failing = Feature(geometry=line_geometry, properties=dict(a=4)) self.assertTrue(ff(passing)) self.assertFalse(ff(failing)) def test_any(self): """Test any filter function""" ff = create_filter(['any', ['==', 'a', 5], ['==', 'b', 3]]) passing1 = Feature(geometry=line_geometry, properties=dict(a=5)) passing2 = Feature(geometry=line_geometry, properties=dict(b=3)) passing3 = Feature(geometry=line_geometry, properties=dict(a=5, b=3)) failing1 = Feature(geometry=line_geometry, properties=dict(a=4)) failing2 = Feature(geometry=line_geometry, properties=dict(b=5)) self.assertTrue(ff(passing1))
import datetime

import geojson
from geojson import Feature, FeatureCollection, LineString


def write_geojson_traj(records, GEOJSON_PATH, roadnet_data, roads, graph, CAR_ID, date):
    routing_thres = 30
    """
    # separate data by gid
    with open(FILE_NAME_GPS_CLEAN, 'rb') as f:
        reader = csv.reader(f)
        headers = reader.next()
        records = list(reader)
    #print "cleaned records from last step", len(records)
    records_origin = np.array(records)
    records_compress = []
    iter1 = 0
    while iter1 < len(records_origin):
        gid = records_origin[iter1, 5]
        iter2 = iter1 + 1
        while iter2 < len(records_origin) and gid == records_origin[iter2, 5]:
            iter2 = iter2 + 1
        records_compress.append(records_origin[iter1:iter2, :].tolist())
        iter1 = iter2
    records = []
    for item in records_compress:
        records.append(item[0])
        if len(item) > 1:
            records.append(item[-1])
    #print "compressed records", len(records)
    records = records_origin.tolist()
    """
    points_in_lines = []
    time_pre = int(records[0][0])
    occupy_pre = int(records[0][4])
    gid_pre = int(records[0][5])
    percent_pre = float(records[0][6])
    cnt = 1
    while cnt < len(records):
        # current info
        time_current = int(records[cnt][0])
        occupy_current = int(records[cnt][4])
        gid_current = int(records[cnt][5])
        percent_current = float(records[cnt][6])
        points_in_line = []
        routing_points, dist = gen_points_from_routing(
            gid_current, gid_pre, percent_current, percent_pre,
            roadnet_data, roads, graph, routing_thres)
        if occupy_current == 1 and occupy_pre == 0:
            pick_or_drop = 2
        elif occupy_current == 0 and occupy_pre == 1:
            pick_or_drop = -2
        elif occupy_current == 1 and occupy_pre == 1:
            pick_or_drop = 1
        else:
            pick_or_drop = 0
        time_diff = time_current - time_pre
        if dist < 40 * time_diff and len(routing_points) > 0:
            for item in routing_points:
                points_in_line.append(tuple(item))
            time_string = datetime.datetime.utcfromtimestamp(
                time_current + 3600 * 8).strftime('%Y-%m-%d %H:%M:%S')
            points_in_lines.append(
                Feature(geometry=LineString(points_in_line),
                        properties={
                            'ABSTIME': time_current,
                            'TIME': time_string,
                            'OSM_ID': int(roadnet_data[int(gid_current) - 1][16]),
                            'GID': gid_current,
                            'Percent': percent_current,
                            'GID_pre': gid_pre,
                            'Percent_pre': percent_pre,
                            'Pick_or_drop': pick_or_drop,
                            'ABS_TIME_PRE': time_pre,
                            'Dist': dist
                        }))
            # mark the pre info
            time_pre = time_current
            occupy_pre = occupy_current
            gid_pre = gid_current
            percent_pre = percent_current
        elif time_diff < 300 and cnt > 2:
            records.pop(cnt)
            records.pop(cnt - 1)
            if len(points_in_lines) > 0:
                points_in_lines.pop()
            time_pre = int(records[cnt - 2][0])
            occupy_pre = int(records[cnt - 2][4])
            gid_pre = int(records[cnt - 2][5])
            percent_pre = float(records[cnt - 2][6])
            cnt = cnt - 2
        elif cnt < len(records) - 1:
            time_pre = int(records[cnt + 1][0])
            occupy_pre = int(records[cnt + 1][4])
            gid_pre = int(records[cnt + 1][5])
            percent_pre = float(records[cnt + 1][6])
            cnt += 1
        else:
            break
        cnt += 1
    points_in_lines_new = reconnect_points_in_lines(points_in_lines, roadnet_data,
                                                    roads, graph, date)
    geom_in_geojson = FeatureCollection(points_in_lines_new)
    with open(GEOJSON_PATH + 'one_car_traj_' + str(CAR_ID) + '.geojson', 'w') as outfile:
        geojson.dump(geom_in_geojson, outfile)
    print("write geojson done!", ",",
          GEOJSON_PATH + 'one_car_traj_' + str(CAR_ID) + '.geojson')
    return points_in_lines
def plot_flows(fdf, map_f=None, min_flow=0, tiles='cartodbpositron', zoom=6,
               flow_color='red', opacity=0.5, flow_weight=5, flow_exp=0.5,
               style_function=flow_style_function, flow_popup=False,
               num_od_popup=5, tile_popup=True, radius_origin_point=5,
               color_origin_point='#3186cc'):
    """
    :param fdf: FlowDataFrame
        `FlowDataFrame` to visualize.
    :param map_f: folium.Map
        `folium.Map` object where the flows will be plotted. If `None`, a new map will be created.
    :param min_flow: float
        only flows larger than `min_flow` will be plotted.
    :param tiles: str
        folium's `tiles` parameter.
    :param zoom: int
        initial zoom.
    :param flow_color: str
        color of the flow edges.
    :param opacity: float
        opacity (alpha level) of the flow edges.
    :param flow_weight: float
        weight factor used in the function to compute the thickness of the flow edges.
    :param flow_exp: float
        weight exponent used in the function to compute the thickness of the flow edges.
    :param style_function: lambda function
        GeoJson style function.
    :param flow_popup: bool
        if `True`, when clicking on a flow edge a popup window displaying information on the flow will appear.
    :param num_od_popup: int
        number of origin-destination pairs to show in the popup window of each origin location.
    :param tile_popup: bool
        if `True`, when clicking on a location marker a popup window displaying information on the flows departing from that location will appear.
    :param radius_origin_point: float
        size of the location markers.
    :param color_origin_point: str
        color of the location markers.

    :return: `folium.Map` object with the plotted flows.
    """
    if map_f is None:
        # initialise map
        lon, lat = np.mean(np.array(list(
            fdf.tessellation.geometry.apply(utils.get_geom_centroid).values)),
            axis=0)
        map_f = folium.Map(location=[lat, lon], tiles=tiles, zoom_start=zoom)

    mean_flows = fdf[constants.FLOW].mean()

    O_groups = fdf.groupby(by=constants.ORIGIN)
    for O, OD in O_groups:
        geom = fdf.get_geometry(O)
        lonO, latO = utils.get_geom_centroid(geom)
        for D, T in OD[[constants.DESTINATION, constants.FLOW]].values:
            if O == D:
                continue
            if T < min_flow:
                continue
            geom = fdf.get_geometry(D)
            lonD, latD = utils.get_geom_centroid(geom)
            gjc = LineString([(lonO, latO), (lonD, latD)])
            fgeojson = folium.GeoJson(gjc, name='geojson',
                                      style_function=style_function(
                                          T / mean_flows, flow_color, opacity,
                                          flow_weight, flow_exp))
            if flow_popup:
                popup = folium.Popup('flow from %s to %s: %s' % (O, D, int(T)),
                                     max_width=300)
                fgeojson = fgeojson.add_child(popup)
            fgeojson.add_to(map_f)

    if radius_origin_point > 0:
        for O, OD in O_groups:
            name = 'origin: %s' % O.replace('\'', '_')
            T_D = [[T, D] for D, T in
                   OD[[constants.DESTINATION, constants.FLOW]].values]
            trips_info = '<br/>'.join(
                ["flow to %s: %s" % (dd.replace('\'', '_'), int(tt))
                 for tt, dd in sorted(T_D, reverse=True)[:num_od_popup]])
            geom = fdf.get_geometry(O)
            lonO, latO = utils.get_geom_centroid(geom)
            fmarker = folium.CircleMarker([latO, lonO],
                                          radius=radius_origin_point,
                                          weight=2,
                                          color=color_origin_point,
                                          fill=True,
                                          fill_color=color_origin_point)
            if tile_popup:
                popup = folium.Popup(name + '<br/>' + trips_info, max_width=300)
                fmarker = fmarker.add_child(popup)
            fmarker.add_to(map_f)
    return map_f
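# A minimal usage sketch for plot_flows, assuming scikit-mobility's
# FlowDataFrame. The file name, tile size and column names below are
# illustrative placeholders, not part of the function above.
import skmob
from skmob.tessellation import tilers

tessellation = tilers.tiler.get('squared', base_shape='London, United Kingdom',
                                meters=15000)
fdf = skmob.FlowDataFrame.from_file('flows.csv', tessellation=tessellation,
                                    tile_id='tile_ID')
map_f = plot_flows(fdf, min_flow=10, flow_color='blue', flow_popup=True)
map_f.save('flows.html')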
# Fragment of a per-record loop: `line`, `time_pre`, `time_current`,
# `occupy_pre`/`occupy_current`, `points_in_line` and `points_in_lines`
# are defined by the surrounding (omitted) code.
if isinstance(line[1], float):
    continue
else:
    string_split = line[1].split(',')
    if len(string_split) == 1:
        continue
    if occupy_current == 1 and occupy_pre == 0:
        pick_or_drop = 2
    elif occupy_current == 0 and occupy_pre == 1:
        pick_or_drop = -2
    elif occupy_current == 1 and occupy_pre == 1:
        pick_or_drop = 1
    else:
        pick_or_drop = 0
    for item in string_split:
        lon, lat = item.split(' ')
        points_in_line.append((float(lon), float(lat)))
    points_in_lines.append(
        Feature(geometry=LineString(points_in_line),
                properties={
                    'Time': line[0],
                    'ABS_TIME': line[2],
                    'ABS_TIME_PRE': time_pre,
                    'Pick_or_drop': pick_or_drop
                }))
    time_pre = time_current
    occupy_pre = occupy_current

geom_in_geojson = FeatureCollection(points_in_lines)
with open('./Result/one_car_traj.geojson', 'w') as outfile:
    geojson.dump(geom_in_geojson, outfile)
def createLine():
    return LineString([createPoint() for unused in range(numberVertices)])
from datetime import date
from operator import itemgetter

from geojson import Feature, LineString
from geopy import Point
from geopy.distance import distance

wind_data = sorted(wind_data, key=itemgetter('posix_time'))
current_date = date.today()
current_location = Point(35.908199, -75.668230)  # geopy Point is (lat, lon)
current_feature = 'First Feature'
all_features = []
for record in wind_data:
    rdate = date(int(record['year']), int(record['month']), int(record['day']))
    if rdate != current_date:
        if current_feature != 'First Feature':
            all_features.append(current_feature)
        # GeoJSON positions are (lon, lat), hence the index swap
        current_feature = Feature(
            geometry=LineString([(current_location[1], current_location[0])]),
            properties={'date': rdate.strftime('%B %d, %Y')})
        current_date = rdate
    wind_speed = float(record['wind_speed'])
    wind_dir = record['wind_direction']
    if wind_dir != '':
        bearing = (int(wind_dir) + 180) % 360
    else:
        bearing = 0
    next_location = distance(miles=wind_speed).destination(current_location,
                                                           bearing)
    current_feature['geometry']['coordinates'].append(
        [next_location[1], next_location[0]])
    # advance the start point for the next segment; this step is the assumed
    # intent of the drift model (the fragment never updated current_location)
    current_location = next_location
def test_point_to_line_distance():
    point = Feature(geometry=Point((0, 0)))
    linestring = Feature(geometry=LineString([(1, 1), (-1, 1)]))
    pld = point_to_line_distance(point, linestring, units="mi")
    assert round(pld, 4) == 69.0934
def add_external_idf_formatted_catalogue(self, cat, ll_deltas=0.01,
                                         delta_t=dt.timedelta(seconds=30),
                                         utc_time_zone=dt.timezone(dt.timedelta(hours=0)),
                                         buff_t=dt.timedelta(seconds=0),
                                         buff_ll=0, use_ids=False,
                                         logfle=False):
    """
    This merges an external catalogue formatted in the ISF format e.g. a
    catalogue coming from an external agency. Because of this, we assume
    that each event has a single origin.

    :param cat:
        An instance of :class:`ISFCatalogue` i.e. the 'guest' catalogue
    :param ll_deltas:
        A float defining the tolerance in decimal degrees used when looking
        for colocated events
    :param delta_t:
        Tolerance used to find colocated events. It's an instance of
        :class:`datetime.timedelta`
    :param utc_time_zone:
        A :class:`datetime.timezone` instance describing the reference
        timezone for the new catalogue.
    :param buff_t:
        Tolerance used to find events close to the selection threshold.
        It's an instance of :class:`datetime.timedelta`
    :param buff_ll:
        A float defining the tolerance used to find events close to the
        selection threshold.
    :param use_ids:
        A boolean
    :param logfle:
        Name of the file which will contain the log of the processing
    :return:
        - A list with the indexes of the events in the 'guest' catalogue
          added to the 'host' catalogue.
        - A dictionary with doubtful events. The keys in this dictionary are
          the indexes of the events in the 'host' catalogue. The values are
          the indexes of the doubtful events in the 'guest' catalogue.
    """
    if logfle:
        fou = open(logfle, 'w')
        fname_geojson = os.path.splitext(logfle)[0] + "_secondary.geojson"

    # This is a dictionary where we store the doubtful events.
    doubts = {}

    # Check if we have a spatial index
    assert 'sidx' in self.__dict__

    # Set delta time thresholds
    if hasattr(delta_t, '__iter__'):
        threshold = np.array([[t[0], t[1].total_seconds()] for t in delta_t])
    else:
        threshold = np.array([[1000, delta_t.total_seconds()]])

    # Set ll delta thresholds
    if hasattr(ll_deltas, '__iter__'):
        ll_deltas = np.array([d for d in ll_deltas])
    else:
        ll_deltas = np.array([[1000, ll_deltas]])

    # Processing the events in the 'guest' catalogue
    id_common_events = []
    features = []
    new = 0
    new_old = 0
    common = 0
    common_old = 0
    iloc = 0
    for iloc, event in enumerate(cat.events):

        if logfle:
            msg = 'Index: {:d} Event ID: {:s}\n'.format(iloc, event.id)
            fou.write(msg)

        # Initial settings
        found = False
        before = self.get_number_events()

        # Updating time of the origin to the new timezone
        new_datetime = dt.datetime.combine(event.origins[0].date,
                                           event.origins[0].time,
                                           tzinfo=utc_time_zone)
        new_datetime = new_datetime.astimezone(self.timezone)
        event.origins[0].date = new_datetime.date()
        event.origins[0].time = new_datetime.time()

        # Set the datetime of the event
        dtime_a = dt.datetime.combine(event.origins[0].date,
                                      event.origins[0].time)

        # Take the appropriate value from delta_ll - this is needed in
        # particular when delta_ll varies with time.
        idx_threshold = max(np.argwhere(dtime_a.year > ll_deltas[:, 0]))
        ll_thrs = ll_deltas[idx_threshold, 1]

        # Create selection window
        minlo = event.origins[0].location.longitude - ll_thrs
        minla = event.origins[0].location.latitude - ll_thrs
        maxlo = event.origins[0].location.longitude + ll_thrs
        maxla = event.origins[0].location.latitude + ll_thrs

        # Querying the spatial index
        obj = [n.object for n in self.sidx.intersection(
            (minlo, minla, maxlo, maxla), objects=True)]

        # This is for checking. We perform the check only if the buffer
        # distance is larger than 0
        obj_e = []
        obj_a = []
        if buff_ll > 0 or buff_t.seconds > 0:
            obj_a = [n.object for n in self.sidx.intersection(
                (minlo - buff_ll, minla - buff_ll,
                 maxlo + buff_ll, maxla + buff_ll), objects=True)]
            obj_b = [n.object for n in self.sidx.intersection(
                (minlo + buff_ll, minla + buff_ll,
                 maxlo - buff_ll, maxla + buff_ll), objects=True)]
            # Find the index of the events in the buffer across the
            # selection window
            obj_e = list(set(obj_a) - set(obj_b))

        # Find the appropriate delta_time
        idx_threshold = max(np.argwhere(dtime_a.year > threshold[:, 0]))
        sel_thrs = threshold[idx_threshold, 1]

        if logfle:
            msg = ' Selected {:d} events \n'.format(len(obj))
            fou.write(msg)

        if len(obj):
            # Checking the events selected with the spatial index. obj is
            # a list of tuples (event and origin ID) in the host catalogue
            # for the epicenters close to the investigated event
            for i in obj:

                # Selecting the origin of the event found in the catalogue
                i_eve = i[0]
                i_ori = i[1]
                orig = self.events[i_eve].origins[i_ori]
                dtime_b = dt.datetime.combine(orig.date, orig.time)

                # Check if time difference is within the threshold value
                delta = abs((dtime_a - dtime_b).total_seconds())

                if logfle:
                    eid = self.events[i_eve].id
                    msg = ' Event ID: {:s}\n'.format(eid)
                    msg += ' Delta: {:f}\n'.format(delta)
                    fou.write(msg)

                if delta < sel_thrs and found is False:

                    # Found an origin in the same space-time window
                    found = True
                    tmp = event.origins

                    # Check this event already contains an origin from
                    # the same agency
                    origins = self.events[i_eve].origins
                    if tmp[0].author in [o.author for o in origins]:
                        fmt = "This event already contains "
                        fmt += " an origin from the same agency: {:s}\n"
                        fmt += " Trying to add evID {:s}\n"
                        msg = fmt.format(tmp[0].author, event.id)
                        warnings.warn(msg)
                        if logfle:
                            fou.write(msg)

                    # Set prime solution is necessary
                    if (len(self.events[i_eve].origins) == 1 and
                            not self.events[i_eve].origins[0].is_prime):
                        tmp[0].is_prime = True
                    else:
                        tmp[0].is_prime = False

                    # Check event ID
                    if use_ids:
                        if event.id != self.events[i_eve].id:
                            fmt = " Trying to add a secondary origin "
                            fmt += " whose ID {:s} differs from the "
                            fmt += " original one. Skipping\n"
                            msg = fmt.format(event.id, self.events[i_eve].id)
                            warnings.warn(msg)
                            found = False
                            continue

                    # Check if a secondary solution from the same agency
                    # exists
                    authors = [m.author for m in self.events[i_eve].magnitudes]
                    if event.magnitudes[0].author in authors:
                        print("Solution already included for this source")
                        print(event.magnitudes[0].origin_id)
                        found = False
                        continue

                    # Info
                    fmt = "Adding to event {:d}\n"
                    msg = fmt.format(i_eve)

                    # Updating the .geojson file
                    if logfle:
                        fou.write(msg)
                        lon1 = self.events[i_eve].origins[0].location.longitude
                        lat1 = self.events[i_eve].origins[0].location.latitude
                        lon2 = tmp[0].location.longitude
                        lat2 = tmp[0].location.latitude
                        line = LineString([(lon1, lat1), (lon2, lat2)])
                        ide = self.events[i_eve].id
                        features.append(Feature(geometry=line,
                                                properties={"originalID": ide}))

                    # Merging a secondary origin
                    self.events[i_eve].merge_secondary_origin(tmp)
                    id_common_events.append(iloc)
                    common += 1
                    break

        # Searching for doubtful events:
        if buff_ll > 1e-10 and buff_t.seconds > 1e-10:
            if len(obj_a) > 0:
                for i in obj_a:
                    to_add = False

                    # Selecting origin of the event found in the catalogue
                    i_eve = i[0]
                    i_ori = i[1]
                    orig = self.events[i_eve].origins[i_ori]
                    dtime_b = dt.datetime.combine(orig.date, orig.time)

                    # Check if time difference within the threshold value
                    tmp_delta = abs(dtime_a - dtime_b).total_seconds()

                    # Within max distance and across the time buffer
                    tsec = buff_t.total_seconds()
                    if (tmp_delta > (sel_thrs - tsec) and
                            tmp_delta < (sel_thrs + tsec)):
                        to_add = True

                    # Within max time and within the ll buffer
                    if (not to_add and tmp_delta < (sel_thrs + tsec)):
                        if i in obj_e:
                            to_add = True

                    # Saving info
                    if to_add:
                        if i[0] in doubts:
                            doubts[i[0]].append(iloc)
                        else:
                            doubts[i[0]] = [iloc]

        # Adding new event
        if not found:

            # Making sure that the ID of the event added does not exist
            # already
            if event.id in set(self.ids):
                if use_ids:
                    fmt = "Adding a new event whose ID {:s}"
                    fmt += " is already in the DB. Making it secondary."
                    msg = fmt.format(event.id)
                    warnings.warn(msg)
                    if logfle:
                        fou.write(msg)
                    i_eve = np.where(np.array(self.ids) == event.id)
                    tmp = event.origins
                    tmp[0].is_prime = False
                    self.events[i_eve[0][0]].merge_secondary_origin(tmp)
                    found = 1
                    common += 1
                else:
                    fmt = 'Event ID: {:s} already there. Length ids {:d}'
                    msg = fmt.format(event.id, len(self.ids))
                    raise ValueError(msg)
            else:
                assert len(event.origins) == 1
                event.origins[0].is_prime = True
                self.events.append(event)
                if logfle:
                    msg = "Adding new event\n"
                    fou.write(msg)
                self.ids.append(event.id)
                new += 1

        # Checking
        if (new - new_old) > 0 and (common - common_old > 0):
            msg = '{:d}'.format(iloc)
            raise ValueError(msg)
        elif (new - new_old) > 1:
            msg = 'New increment larger than 1, iloc {:d}'.format(iloc)
            raise ValueError(msg)
        elif (common - common_old) > 1:
            msg = 'Common increment larger than 1, iloc {:d}'.format(iloc)
            raise ValueError(msg)
        else:
            new_old = new
            common_old = common

        after = self.get_number_events()
        # if not iloc % 5000:
        #     idxs, stats = self.get_prime_events_info()
        #     num_primes = [len(stats[k]) for k in stats.keys()]
        #     msg = "{:d}".format(iloc)
        #     assert sum(num_primes) == after, msg

        fmt = 'before {:d} after {:d} iloc {:d} found {:d} loops: {:d}'
        msg = fmt.format(before, after, iloc, found, iloc)
        dlt = 0 if found else 1
        assert before + dlt == after, msg

    # Checking
    fmt = "Wrong budget \n"
    fmt += "Common: {:d} New: {:d} Sum: {:d} Expected: {:d} loops: {:d}\n"
    msg = fmt.format(common, new, common + new, cat.get_number_events(),
                     iloc + 1)
    assert (common + new) == cat.get_number_events(), msg

    # Updating the spatial index
    self._create_spatial_index()

    if logfle:
        fou.close()
        feature_collection = FeatureCollection(features)
        with open(fname_geojson, 'w') as f:
            dump(feature_collection, f)

    return id_common_events, doubts
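# Hedged usage sketch for the merge above: `host_cat` and `guest_cat` are
# assumed to be ISFCatalogue instances built elsewhere, and the tolerance
# values are illustrative, not recommendations.
import datetime as dt

common_ids, doubtful = host_cat.add_external_idf_formatted_catalogue(
    guest_cat,
    ll_deltas=0.05,                    # spatial tolerance in decimal degrees
    delta_t=dt.timedelta(seconds=20),  # time tolerance for co-located events
    buff_ll=0.01,                      # spatial buffer for doubtful events
    buff_t=dt.timedelta(seconds=5),    # time buffer for doubtful events
    logfle='merge.log')                # also writes merge_secondary.geojson
print(len(common_ids), 'guest events merged as secondary origins')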
def key_value_to_geojson(values):
    # keep only records where both coordinate fields are truthy; the original
    # tested value[0] twice, which looks like a typo
    points = [(value[0], value[1]) for value in values if value[0] and value[1]]
    return Feature(geometry=LineString(points))
import sys
from datetime import timedelta

from geojson import Feature, FeatureCollection, LineString


def multimodal_directions(origin, destination, modes, API_KEY):
    """Query the Google Maps Directions API once per mode and return the
    routes as a GeoJSON FeatureCollection plus timing metadata.

    Relies on the module-level helpers `gmaps_directions` and
    `decode_polyline` and a module-level `departure_time` datetime.
    """
    # Store GeoJSON features in a list
    results = []
    # Store durations and start / stop times
    durations = []
    starttimes = []
    endtimes = []
    for mode in modes:
        # Get data from Google Maps Directions API
        data = gmaps_directions(origin, destination, mode, API_KEY)
        # Check to see if no routes returned.
        if len(data['routes']) == 0:
            sys.exit("Sorry, directions are not available for {} from {} to {}"
                     .format(mode, origin, destination))
        # Get duration in seconds
        if 'duration_in_traffic' in data['routes'][0]['legs'][0]:
            duration = data['routes'][0]['legs'][0]['duration_in_traffic']['value']
        else:
            duration = data['routes'][0]['legs'][0]['duration']['value']
        # Calculate arrival time
        arrival_time = departure_time + timedelta(0, duration)
        # Get polyline
        polyline = data['routes'][0]['overview_polyline']['points']
        # Decode polyline
        decoded_polyline = decode_polyline(polyline)
        # Create LineString
        linestring = LineString(decoded_polyline)
        # Create GeoJSON properties
        properties = {
            'mode': mode,
            'duration': duration,
            'start': departure_time.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3],
            'end': arrival_time.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]
        }
        # Create GeoJSON feature
        feature = Feature(geometry=linestring, properties=properties)
        # Store feature in results list
        results.append(feature)
        # Store duration and start/stop times in lists
        durations.append(duration)
        starttimes.append(departure_time)
        endtimes.append(arrival_time)
    # Convert list of features to GeoJSON FeatureCollection
    feature_collection = FeatureCollection(results)
    return feature_collection, durations, starttimes, endtimes
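# Illustrative call; the places and key are placeholders, and departure_time
# is set here only because the function reads it from module scope.
from datetime import datetime

departure_time = datetime.now()
fc, durations, starts, ends = multimodal_directions(
    'Boston, MA', 'Cambridge, MA', ['driving', 'walking', 'transit'],
    API_KEY='YOUR_KEY')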
def to_geojson_feature(self):
    # materialise the pairs so the coordinates survive Python 3's lazy zip
    center_line = list(zip(map(int, self.center_line.x),
                           map(int, self.center_line.y)))
    return Feature(geometry=LineString(center_line), id=self.id)
# Fragment: `geo` iterates over coordinate pairs in an enclosing (omitted)
# loop that accumulates geo_string.
geo_ = ','.join([str(elem) for elem in geo])
geo_string.append(geo_)

list_geometry = ';'.join([str(elem) for elem in geo_string])
url = ('http://ivolab:5000/match/v1/driving/' + list_geometry +
       '?steps=false&geometries=geojson&overview=full&annotations=false&tidy=true')
response = requests.get(url)

matchings = response.json()['matchings']
geooo = []
for i in range(len(matchings)):
    geooo.append(matchings[i]['geometry']['coordinates'])
geometry = LineString([y for x in geooo for y in x])

distance = 0  # initialised here; the fragment accumulated it without an init
for i in range(len(matchings)):
    for j in range(len(matchings[i]['legs'])):
        distance += matchings[i]['legs'][j]['distance']

prop = {"country": "Singapore"}
geometryJSON = Feature(geometry=geometry, properties=prop)
# geometryJSON = FeatureCollection([geometryJSON], crs=crs)
with open("mapmatched/test6.geojson", "w") as file:
    dump(geometryJSON, file)
df_stops.to_csv("{}.csv".format(args.route), index=False)

# shapes
c.execute(
    """SELECT shape_id, COUNT(*) count
       FROM trips
       WHERE route_id = %s
       GROUP BY shape_id
       ORDER BY count DESC""", (route_id, ))
trips = c.fetchall()
for trip in trips:
    shape_id = trip['shape_id']
    print(shape_id)
    c.execute(
        """SELECT *
           FROM shapes
           WHERE shape_id = %s
           ORDER BY shape_pt_sequence ASC""", (shape_id, ))
    sequence = c.fetchall()
    points = []
    for pt in sequence:
        # use bare (lon, lat) positions: wrapping each vertex in a geojson
        # Point, as the original did, would nest whole geometry objects
        # inside the LineString coordinates
        points.append((float(pt['shape_pt_lon']), float(pt['shape_pt_lat'])))
    linestring = Feature(geometry=LineString(points),
                         properties={'name': args.route})
    features = geo_stops.copy()
    features.append(linestring)
    feature_collection = FeatureCollection(features)
    with open("{}_{}.geojson".format(args.route, shape_id), "w") as f:
        dump(feature_collection, f, indent=2)
print("Invalid number of arguments used!") print("Usage: objtogeojson.py <input OBJ file> <output GeoJSON file>") sys.exit() input_file = sys.argv[1] output_file = sys.argv[2] vertices = [] features = [] for line in open(input_file, "r").readlines(): split_line = line.strip().split(" ") identifier = split_line[0] data = split_line[1:] if identifier == "v": vertex = [float(val) for val in data] vertices.append(vertex) features.append(Feature(geometry=Point(tuple(vertex)))) if identifier == "f": line = tuple( [vertices[index - 1] for index in [int(val) for val in data]]) features.append(Feature(geometry=LineString(line))) feature_collection = FeatureCollection(features) with open(output_file, 'w') as f: dump(feature_collection, f)
def write_geojson(df, filename=None, geomtype='linestring', drop_na=True):
    """
    Convert dataframe with coords series to geojson format

    :param df: target dataframe
    :param filename: optional path of new file to contain geojson
    :param geomtype: geometry type [linestring, point, polygon]
    :param drop_na: whether to remove properties with None values
    :return: geojson.FeatureCollection

    >>> from swmmio.examples import philly
    >>> geoj = write_geojson(philly.links.dataframe, drop_na=True)
    >>> print(json.dumps(geoj['features'][0]['properties'], indent=2))
    {
      "InletNode": "J1-025",
      "OutletNode": "J1-026",
      "Length": 309.456216,
      "Roughness": 0.014,
      "InOffset": 0,
      "OutOffset": 0.0,
      "InitFlow": 0,
      "MaxFlow": 0,
      "Shape": "CIRCULAR",
      "Geom1": 1.25,
      "Geom2": 0,
      "Geom3": 0,
      "Geom4": 0,
      "Barrels": 1,
      "Name": "J1-025.1"
    }
    >>> print(json.dumps(geoj['features'][0]['geometry'], indent=2))
    {
      "type": "LineString",
      "coordinates": [
        [
          2746229.223,
          1118867.764
        ],
        [
          2746461.473,
          1118663.257
        ]
      ]
    }
    """
    # CONVERT THE DF INTO JSON
    df['Name'] = df.index  # add a name column (we won't have the index)
    records = json.loads(df.to_json(orient='records'))

    # ITERATE THROUGH THE RECORDS AND CREATE GEOJSON OBJECTS
    features = []
    for rec in records:
        coordinates = rec['coords']
        del rec['coords']  # delete the coords so they aren't in the properties
        if drop_na:
            rec = {k: v for k, v in rec.items() if v is not None}
        latlngs = coordinates
        if geomtype == 'linestring':
            geometry = LineString(latlngs)
        elif geomtype == 'point':
            geometry = Point(latlngs)
        elif geomtype == 'polygon':
            geometry = Polygon([latlngs])
        feature = Feature(geometry=geometry, properties=rec)
        features.append(feature)

    if filename is not None:
        # open in text mode: json.dumps returns a str, not bytes
        with open(filename, 'w') as f:
            f.write(json.dumps(FeatureCollection(features)))
        return filename
    else:
        return FeatureCollection(features)
def get_geojson_geometry(self):
    coordinates = []
    for p in self.points:
        coordinates.append((p.latitude, p.longitude))  # Keep reversed for folium
    return LineString(coordinates)
def frame_to_geojson(frames):
    # keep only frames where both coordinates are truthy; the original
    # tested frame[0] twice, which looks like a typo
    points = [(frame[0], frame[1]) for frame in frames if frame[0] and frame[1]]
    return Feature(geometry=LineString(points))
def get_geojson_geometry(self):
    coordinates = [(self.coord1[0], self.coord1[1]),
                   (self.coord2[0], self.coord2[1])]
    return LineString(coordinates)
def test_length():
    ls = LineString([(115, -32), (131, -22), (143, -25), (150, -34)])
    lens = length(ls, units="mi")
    assert round(lens, 4) == 2738.9664
import googleMapsPolyline
import geojson
from geojson import LineString
import pyperclip

c1 = "wb{rGyiza@~@[fC}@`@MdA_@n@QnAa@NGLCRGjAa@LE@?JEbA]\\UV[p@{@V_@HMn@}@HOp@eAJQRJL@ZCLEFCDCJCJEBAHE@?`DmAJEDALEZMdCcA\\Mf@WVKRKDAr@WfAc@PGp@WDAJELELGj@Ul@WZG`@QDECWWiDYiDAKCWOmBAOAGAMEa@McBGm@Eq@CWAIAMCg@EWC_@E]KwACQC[C[ASCYAGGy@AIAKASCWGm@AQ?AEc@RUX]nAuA|BgCHKRSBEv@_ABCFIDGDEFKHIDE^c@FIf@i@@ATYNOLODEhAsAbAeAJMJORS@AJOTW@AJKLQFE`@c@DGHIPUHIBCHI^e@HIBCBEDEHIFIzBiCFWF]n@iEZaCXoB@KBKDY?CBM@KDY?Q?G@YAMCMGUGUACI[s@}BEMGUIWeBcGCMEKGQIYmAcEqAqEGUEMI[EKi@kBCGGQoD_M[eAy@iCWk@a@y@a@s@wAgCgAsBS_@mBgDoBmD_@_AyCeFk@k@mTq]}@yAaA{AIMaEwGeC}DsBoDyB_De@{@g@_Ae@oA]iAQi@EMKYKOGIKGIEICUGYOWOYa@_CiD]g@eCmDEGMSOSQWGIMSg@w@??y@mAk@{@CEa@m@uA{Be@aACGKUU_@k@y@MSEIEGEGGK]i@KMY]qGsKk@wAQYIKAEKQCIAKBQDGJKVM\\GxBUv@]j@e@f@}@ReABaBI_AU{BMkAO_A?ACOAMESAKKa@IUcBwDWk@?CS[WY]S]Mu@I_@Ca@A_@GUGQKUMEGQMKOQWISIQK[Mc@GYAICKCOAC?EQcAe@oCWqAGy@Ak@?_@@Q?UDkA?M@]@[?AAQ?MA[AWAI]kDEw@ASAs@EsA?KAg@Ca@Es@M}@Ky@u@wFOgA[gCCMAG?Gk@cEScBIYMS_@g@_@YQUKc@Ge@CMAKE[[mBKy@WoH?_@@i@HaB?O@ADy@BIHq@Ba@@QB[?QF?HEFK@I?A?KAGCIGGGCE@AIAWAqHEi@q@cKC_@k@qIW@CB_Az@E@KHk@T[L"
c1Coords = googleMapsPolyline.decode(c1)
# pyperclip.copy() expects a string, so serialise the LineString first
pyperclip.copy(geojson.dumps(LineString(c1Coords)))
print(LineString(c1Coords))
def linestring(self, node_limit=1000):
    """ Returns a geojson linestring object with a random number of nodes """
    return LineString(
        [self.lnglat()[0] for i in range(randint(2, node_limit))])
def createLine():
    coords = []
    for i in range(numberVertices):
        coords.append((randomLon(), randomLat()))
    return LineString(coords)
def processPolylines():
    encoder = gpolyencode.GPolyEncoder()
    json_data = open('data.txt')
    datadir = os.path.join(os.getcwd(), 'data')
    gtfsdir = os.path.join(datadir, 'gtfs')
    geojsondir = os.path.join(datadir, 'geojson')
    polydir = os.path.join(datadir, 'polylines')
    data = json.load(json_data, object_hook=_decode_dict)
    # pprint(data)
    json_data.close()
    # text mode with newline='' replaces Python 2's 'wb' for csv writers
    with open(gtfsdir + "/shapes.txt", 'w', newline='') as shapesfile:
        shapeswriter = csv.writer(shapesfile)
        shapeswriter.writerow(["shape_id", "shape_pt_sequence",
                               "shape_dist_traveled", "shape_pt_lon",
                               "shape_pt_lat"])
        for trip, stops in data.items():
            print(trip)
            legpoints = []
            jsonpoints = []
            for i in range(20):
                filepath = os.path.join(polydir, trip + "_" + str(i) + ".json")
                if os.path.exists(filepath):
                    gmaps = open(filepath)
                    linedata = json.load(gmaps)
                    print(trip + "_" + str(i))
                    if args.dir == 'goog':
                        if linedata['status'] != "OK":
                            continue
                        print(linedata['routes'][0]['overview_polyline']['points'])
                        points = decode(linedata['routes'][0]
                                        ['overview_polyline']['points'])
                        for point in points:
                            dictpoint = {'x': point[0], 'y': point[1]}
                            legpoints.append(dictpoint)
                        gmaps.close()
                    elif args.dir == 'osrm':
                        if linedata['code'] != "Ok":
                            continue
                        print(linedata['routes'][0]['geometry'])
                        points = decode(linedata['routes'][0]['geometry'])
                        for point in points:
                            dictpoint = {'x': point[0] / 10, 'y': point[1] / 10}
                            legpoints.append(dictpoint)
                        gmaps.close()
                    elif args.dir == 'mapbox':
                        if linedata['code'] != "Ok":
                            continue
                        print(linedata['routes'][0]['geometry'])
                        points = decode(linedata['routes'][0]['geometry'])
                        for point in points:
                            dictpoint = {'x': point[0] / 10, 'y': point[1] / 10}
                            legpoints.append(dictpoint)
                        gmaps.close()
            if not legpoints:
                continue
            else:
                simplified = simplify(legpoints, .0002, True)
                count = 0
                for point in simplified:
                    jsonpoints.append((point['x'], point['y']))
                    shppoint = [point['x'], point['y']]
                    shppoint.insert(0, trip)
                    shppoint.insert(1, count)
                    shppoint.insert(2, "")
                    shapeswriter.writerow(shppoint)
                    count += 1
                ls = LineString(jsonpoints)
                gc = GeometryCollection([ls])
                gtfsfile = os.path.join(geojsondir, trip + '.geojson')
                with open(gtfsfile, 'w') as tripgeo:
                    geojson.dump(gc, tripgeo)
for section in section_of_lines:
    sol = SectionOfLine(start={'id': section.find('SOLOPStart').get('Value')},
                        end={'id': section.find('SOLOPEnd').get('Value')})
    start_op_point = tree.find(
        './/OperationalPoint/UniqueOPID[@Value="%s"]/...' % sol.start['id'])
    end_op_point = tree.find(
        './/OperationalPoint/UniqueOPID[@Value="%s"]/...' % sol.end['id'])
    if start_op_point is None or end_op_point is None:
        print('no start or end op point for section of line: %s' % sol)
        continue
    start_op_point_coords = start_op_point.find('OPGeographicLocation')
    # look up the end coordinates on the end OP; the original re-used
    # start_op_point here, which looks like a copy-paste slip
    end_op_point_coords = end_op_point.find('OPGeographicLocation')
    sol.start['coords'] = (
        float(start_op_point_coords.get('Longitude').replace(',', '.')),
        float(start_op_point_coords.get('Latitude').replace(',', '.')))
    sol.end['coords'] = (
        float(end_op_point_coords.get('Longitude').replace(',', '.')),
        float(end_op_point_coords.get('Latitude').replace(',', '.')))
    line_strings.append(LineString([sol.start['coords'], sol.end['coords']]))

geometry_collection = GeometryCollection(line_strings)
print(geojson.dumps(geometry_collection))
def model_to_networkx(model, drop_cycles=True):
    '''Networkx MultiDiGraph representation of the model'''
    from geojson import Point, LineString
    from swmmio.utils.dataframes import create_dataframeINP, create_dataframeRPT
    try:
        import networkx as nx
    except ImportError:
        raise ImportError('networkx module needed. get this package here: ',
                          'https://pypi.python.org/pypi/networkx')

    def multidigraph_from_edges(edges, source, target):
        '''
        create a MultiDiGraph from a dataframe of edges, using the row index
        as the key in the MultiDiGraph
        '''
        us = edges[source]
        vs = edges[target]
        keys = edges.index
        data = edges.drop([source, target], axis=1)
        d_dicts = data.to_dict(orient='records')

        G = nx.MultiDiGraph()
        G.add_edges_from(zip(us, vs, keys, d_dicts))

        return G

    # parse swmm model results with swmmio, concat all links into one dataframe
    nodes = model.nodes()
    if model.rpt is not None:
        inflow_cols = ['MaxLatInflow', 'MaxTotalInflow', 'LatInflowV',
                       'TotalInflowV', 'FlowBalErrorPerc']
        flows = create_dataframeRPT(model.rpt.path,
                                    "Node Inflow Summary")[inflow_cols]
        nodes = nodes.join(flows)

    conduits = model.conduits()
    links = pd.concat([conduits, model.orifices(), model.weirs(), model.pumps()],
                      sort=True)
    links['facilityid'] = links.index

    # create a nx.MultiDiGraph from the combined model links, add node data, set CRS
    G = multidigraph_from_edges(links, 'InletNode', target='OutletNode')
    G.add_nodes_from(zip(nodes.index, nodes.to_dict(orient='records')))

    # create geojson geometry objects for each graph element
    for u, v, k, coords in G.edges(data='coords', keys=True):
        if coords:
            G[u][v][k]['geometry'] = LineString(coords)
    for n, coords in G.nodes(data='coords'):
        if coords:
            # G.nodes replaces the G.node attribute removed in networkx 2.4
            G.nodes[n]['geometry'] = Point(coords[0])

    if drop_cycles:
        # remove cycles
        cycles = list(nx.simple_cycles(G))
        if len(cycles) > 0:
            print('cycles detected and removed: {}'.format(cycles))
            G.remove_edges_from(cycles)

    G.graph['crs'] = model.crs
    return G
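# Hedged usage sketch, assuming swmmio's Model class; the .inp path is a
# placeholder.
from swmmio import Model

model = Model('example.inp')
G = model_to_networkx(model, drop_cycles=True)
print(G.number_of_nodes(), 'nodes,', G.number_of_edges(), 'links')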
def line_to_json_rep(line: SlfLine) -> LineString:
    ps = list_point_tuples(line)
    return LineString(ps)
def reconnect_points_in_lines(points_in_lines, roadnet_data, roads, graph, date):
    routing_thres_reconnect = 60
    date_zero_oclock = int(
        time.mktime(datetime.datetime.strptime(date, "%Y_%m_%d").timetuple()))
    points_in_lines_final = []
    if len(points_in_lines) == 0:
        return []
    # skip segments recorded before midnight of the requested date
    cnt_pre = 0
    while (points_in_lines[cnt_pre]["properties"]["ABS_TIME_PRE"] < date_zero_oclock
           and cnt_pre < len(points_in_lines) - 1):
        cnt_pre += 1
    if len(points_in_lines) - cnt_pre < 2:
        return []
    gid_pre = points_in_lines[cnt_pre]["properties"]["GID"]
    percent_pre = points_in_lines[cnt_pre]["properties"]["Percent"]
    time_pre_current = points_in_lines[cnt_pre]["properties"]["ABSTIME"]
    points_in_lines_final.append(points_in_lines[cnt_pre])
    for cnt in range(1 + cnt_pre, len(points_in_lines)):
        gid_current = points_in_lines[cnt]["properties"]["GID_pre"]
        percent_current = points_in_lines[cnt]["properties"]["Percent_pre"]
        time_current_pre = points_in_lines[cnt]["properties"]["ABS_TIME_PRE"]
        time_current_current = points_in_lines[cnt]["properties"]["ABSTIME"]
        pick_or_drop = points_in_lines[cnt]["properties"]["Pick_or_drop"]
        if 0 < time_current_pre - time_pre_current < 1800:
            points_in_line = []
            routing_points, dist = gen_points_from_routing(
                gid_current, gid_pre, percent_current, percent_pre,
                roadnet_data, roads, graph, routing_thres_reconnect)
            time_diff = time_current_pre - time_pre_current
            if dist < 40 * time_diff:
                for item in routing_points:
                    points_in_line.append(tuple(item))
                time_string = datetime.datetime.utcfromtimestamp(
                    time_current_pre + 3600 * 8).strftime('%Y-%m-%d %H:%M:%S')
                points_in_lines_final.append(
                    Feature(geometry=LineString(points_in_line),
                            properties={
                                'ABSTIME': time_current_pre,
                                'TIME': time_string,
                                'OSM_ID': int(roadnet_data[int(gid_current) - 1][16]),
                                'GID': gid_current,
                                'Percent': percent_current,
                                'GID_pre': gid_pre,
                                'Percent_pre': percent_pre,
                                'Pick_or_drop': pick_or_drop,
                                'ABS_TIME_PRE': time_pre_current,
                                'IS_FILL': 1
                            }))
            else:
                time_string = datetime.datetime.utcfromtimestamp(
                    time_current_pre + 3600 * 8).strftime('%Y-%m-%d %H:%M:%S')
                points_in_lines_final.append(
                    Feature(geometry=LineString([
                        points_in_lines_final[-1]["geometry"]["coordinates"][-1]
                    ]),
                            properties={
                                'ABSTIME': time_current_pre,
                                'TIME': time_string,
                                'OSM_ID': int(roadnet_data[int(gid_current) - 1][16]),
                                'GID': gid_current,
                                'Percent': percent_current,
                                'Pick_or_drop': pick_or_drop,
                                'ABS_TIME_PRE': time_pre_current,
                                'IS_FILL': 1
                            }))
        elif time_current_pre != time_pre_current:
            # NOTE: the original condition compared time_current_pre with
            # itself (always False); comparing against time_pre_current is
            # the assumed intent, i.e. any remaining gap between segments
            time_string = datetime.datetime.utcfromtimestamp(
                time_current_pre + 3600 * 8).strftime('%Y-%m-%d %H:%M:%S')
            points_in_lines_final.append(
                Feature(geometry=LineString([
                    points_in_lines_final[-1]["geometry"]["coordinates"][-1]
                ]),
                        properties={
                            'ABSTIME': time_current_pre,
                            'TIME': time_string,
                            'OSM_ID': int(roadnet_data[int(gid_current) - 1][16]),
                            'GID': gid_current,
                            'Percent': percent_current,
                            'Pick_or_drop': pick_or_drop,
                            'ABS_TIME_PRE': time_pre_current,
                            'IS_FILL': 1
                        }))
        points_in_lines_final.append(points_in_lines[cnt])
        if (time_current_pre == time_pre_current and cnt > 1
                and len(points_in_lines[cnt]["geometry"]["coordinates"]) == 0):
            points_in_lines_final.pop()
            time_string = datetime.datetime.utcfromtimestamp(
                time_current_pre + 3600 * 8).strftime('%Y-%m-%d %H:%M:%S')
            points_in_lines_final.append(
                Feature(geometry=LineString([
                    points_in_lines_final[-1]["geometry"]["coordinates"][-1]
                ]),
                        properties={
                            'ABSTIME': time_current_pre,
                            'TIME': time_string,
                            'OSM_ID': int(roadnet_data[int(gid_current) - 1][16]),
                            'GID': gid_current,
                            'Percent': percent_current,
                            'Pick_or_drop': pick_or_drop,
                            'ABS_TIME_PRE': time_pre_current,
                            'IS_FILL': 1
                        }))
        gid_pre = points_in_lines[cnt]["properties"]["GID"]
        percent_pre = points_in_lines[cnt]["properties"]["Percent"]
        time_pre_current = time_current_current
    if len(points_in_lines_final) > 0:
        if (points_in_lines_final[0]["properties"]["ABS_TIME_PRE"]
                > date_zero_oclock):
            time_string = datetime.datetime.utcfromtimestamp(
                points_in_lines_final[0]["properties"]["ABS_TIME_PRE"]
                + 3600 * 8).strftime('%Y-%m-%d %H:%M:%S')
            points_in_lines_final.insert(
                0,
                Feature(
                    geometry=LineString([
                        points_in_lines_final[0]["geometry"]["coordinates"][0]
                    ]),
                    properties={
                        'ABSTIME':
                            points_in_lines_final[0]["properties"]["ABS_TIME_PRE"],
                        'TIME': time_string,
                        'OSM_ID': points_in_lines_final[0]["properties"]["OSM_ID"],
                        'GID': points_in_lines_final[0]["properties"]["GID"],
                        'Percent': points_in_lines_final[0]["properties"]["Percent"],
                        'Pick_or_drop':
                            points_in_lines_final[0]["properties"]["Pick_or_drop"],
                        'ABS_TIME_PRE': date_zero_oclock
                    }))
    return points_in_lines_final
def plot_trajectory(tdf, map_f=None, max_users=10, max_points=1000,
                    style_function=traj_style_function,
                    tiles='cartodbpositron', zoom=12, hex_color=-1, weight=2,
                    opacity=0.75):
    """
    :param tdf: TrajDataFrame
        TrajDataFrame to be plotted.
    :param map_f: folium.Map
        `folium.Map` object where the trajectory will be plotted. If `None`, a new map will be created.
    :param max_users: int
        maximum number of users whose trajectories should be plotted.
    :param max_points: int
        maximum number of points per user to plot. If necessary, a user's trajectory will be down-sampled to have at most `max_points` points.
    :param style_function: lambda function
        function specifying the style (weight, color, opacity) of the GeoJson object.
    :param tiles: str
        folium's `tiles` parameter.
    :param zoom: int
        initial zoom.
    :param hex_color: str or int
        hex color of the trajectory line. If `-1` a random color will be generated for each trajectory.
    :param weight: float
        thickness of the trajectory line.
    :param opacity: float
        opacity (alpha level) of the trajectory line.

    :return: `folium.Map` object with the plotted trajectories.
    """
    # group by user and keep only the first `max_users`
    nu = 0
    for user, df in tdf.groupby(constants.UID):
        if nu >= max_users:
            break
        nu += 1

        traj = df[[constants.LONGITUDE, constants.LATITUDE]]

        if max_points is None:
            di = 1
        else:
            di = max(1, len(traj) // max_points)
        traj = traj[::di]

        if nu == 1 and map_f is None:
            # initialise map
            center = list(np.median(traj, axis=0)[::-1])
            map_f = folium.Map(location=center, zoom_start=zoom, tiles=tiles)

        line = LineString(traj.values.tolist())

        if hex_color == -1:
            color = get_color(hex_color)
        else:
            color = hex_color
        tgeojson = folium.GeoJson(line, name='tgeojson',
                                  style_function=style_function(weight, color,
                                                                opacity))
        tgeojson.add_to(map_f)
    return map_f
def plot_trajectory(tdf, map_f=None, max_users=10, max_points=1000,
                    style_function=traj_style_function,
                    tiles='cartodbpositron', zoom=12, hex_color=-1, weight=2,
                    opacity=0.75, start_end_markers=True):
    """
    :param tdf: TrajDataFrame
        TrajDataFrame to be plotted.
    :param map_f: folium.Map
        `folium.Map` object where the trajectory will be plotted. If `None`, a new map will be created.
    :param max_users: int
        maximum number of users whose trajectories should be plotted.
    :param max_points: int
        maximum number of points per user to plot. If necessary, a user's trajectory will be down-sampled to have at most `max_points` points.
    :param style_function: lambda function
        function specifying the style (weight, color, opacity) of the GeoJson object.
    :param tiles: str
        folium's `tiles` parameter.
    :param zoom: int
        initial zoom.
    :param hex_color: str or int
        hex color of the trajectory line. If `-1` a random color will be generated for each trajectory.
    :param weight: float
        thickness of the trajectory line.
    :param opacity: float
        opacity (alpha level) of the trajectory line.
    :param start_end_markers: bool
        add markers on the start and end points of the trajectory.

    :return: `folium.Map` object with the plotted trajectories.
    """
    # group by user and keep only the first `max_users`
    nu = 0
    try:
        # column 'uid' is present in the TrajDataFrame
        groups = tdf.groupby(constants.UID)
    except KeyError:
        # column 'uid' is not present
        groups = [[None, tdf]]
    for user, df in groups:
        if nu >= max_users:
            break
        nu += 1

        traj = df[[constants.LONGITUDE, constants.LATITUDE]]

        if max_points is None:
            di = 1
        else:
            di = max(1, len(traj) // max_points)
        traj = traj[::di]

        if nu == 1 and map_f is None:
            # initialise map
            center = list(np.median(traj, axis=0)[::-1])
            map_f = folium.Map(location=center, zoom_start=zoom, tiles=tiles)

        trajlist = traj.values.tolist()
        line = LineString(trajlist)

        if hex_color == -1:
            color = get_color(hex_color)
        else:
            color = hex_color
        tgeojson = folium.GeoJson(line, name='tgeojson',
                                  style_function=style_function(weight, color,
                                                                opacity))
        tgeojson.add_to(map_f)

        if start_end_markers:
            dtime, la, lo = df.loc[df['datetime'].idxmin()][
                [constants.DATETIME, constants.LATITUDE,
                 constants.LONGITUDE]].values
            # pd.Timestamp replaces the pd.datetime alias removed in pandas 2.0
            dtime = pd.Timestamp(dtime).strftime('%Y/%m/%d %H:%M')
            mker = folium.Marker(trajlist[0][::-1],
                                 icon=folium.Icon(color='green'))
            popup = folium.Popup(
                '<i>Start</i><BR>{}<BR>Coord: <a href="https://www.google.co.uk/maps/place/{},{}" target="_blank">{}, {}</a>'
                .format(dtime, la, lo, np.round(la, 4), np.round(lo, 4)),
                max_width=300)
            mker = mker.add_child(popup)
            mker.add_to(map_f)

            dtime, la, lo = df.loc[df['datetime'].idxmax()][
                [constants.DATETIME, constants.LATITUDE,
                 constants.LONGITUDE]].values
            dtime = pd.Timestamp(dtime).strftime('%Y/%m/%d %H:%M')
            mker = folium.Marker(trajlist[-1][::-1],
                                 icon=folium.Icon(color='red'))
            popup = folium.Popup(
                '<i>End</i><BR>{}<BR>Coord: <a href="https://www.google.co.uk/maps/place/{},{}" target="_blank">{}, {}</a>'
                .format(dtime, la, lo, np.round(la, 4), np.round(lo, 4)),
                max_width=300)
            mker = mker.add_child(popup)
            mker.add_to(map_f)
    return map_f
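# Minimal usage sketch, assuming scikit-mobility; the file and column names
# are illustrative.
import skmob

tdf = skmob.TrajDataFrame.from_file('trajectories.csv', latitude='lat',
                                    longitude='lng', user_id='uid',
                                    datetime='datetime')
map_f = plot_trajectory(tdf, max_users=3, hex_color='#3186cc')
map_f.save('trajectories.html')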
def gen_linestring_feature(location_a, location_b):
    """used to gen a geojson linestring"""
    linestring = LineString([location_a.to_geojson(), location_b.to_geojson()])
    return Feature(geometry=linestring)
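# Usage sketch: `Location` is a hypothetical stand-in matching the interface
# gen_linestring_feature expects, i.e. a to_geojson() returning a (lon, lat)
# position.
class Location:
    def __init__(self, lon, lat):
        self.lon, self.lat = lon, lat

    def to_geojson(self):
        return (self.lon, self.lat)

feature = gen_linestring_feature(Location(-75.67, 35.91),
                                 Location(-75.60, 36.00))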