def polygon_tangents(point, polygon):
    """
    Finds the tangents of a (Multi)Polygon from a Point.

    :param point: Point or Point Feature.
    :param polygon: (Multi)Polygon or (Multi)Polygon Feature.
    :return: FeatureCollection of two tangent Point Features.

    Example:

    >>> from turfpy.measurement import polygon_tangents
    >>> from geojson import Polygon, Point, Feature
    >>> point = Feature(geometry=Point((61, 5)))
    >>> polygon = Feature(geometry=Polygon([[(11, 0), (22, 4), (31, 0), (31, 11),
    ...                                      (21, 15), (11, 11), (11, 0)]]))
    >>> polygon_tangents(point, polygon)
    """
    point_coords = get_coords(point)
    poly_coords = get_coords(polygon)
    enext = 0
    bbox_points = bbox(polygon)
    nearest_pt_index = 0
    nearest = None
    if (bbox_points[0] < point_coords[0] < bbox_points[2]
            and bbox_points[1] < point_coords[1] < bbox_points[3]):
        nearest = nearest_point(point, explode(polygon))
        # properties on a geojson Feature is a plain dict, so use key access
        nearest_pt_index = nearest["properties"]["featureIndex"]
    geo_type = get_type(polygon)
    if geo_type == "Polygon":
        rtan = poly_coords[0][nearest_pt_index]
        ltan = poly_coords[0][0]
        if nearest:
            if nearest["geometry"]["coordinates"][1] < point_coords[1]:
                ltan = poly_coords[0][nearest_pt_index]
        eprev = _is_left(
            poly_coords[0][0],
            poly_coords[0][len(poly_coords[0]) - 1],
            point_coords,
        )
        out = process_polygon(poly_coords[0], point_coords, eprev, enext, rtan, ltan)
        rtan = out[0]
        ltan = out[1]
    elif geo_type == "MultiPolygon":
        closest_feature = 0
        closest_vertex = 0
        vertices_counted = 0
        for i in range(0, len(poly_coords[0])):
            closest_feature = i
            vertice_found = False
            for i2 in range(0, len(poly_coords[0][i])):
                closest_vertex = i2
                if vertices_counted == nearest_pt_index:
                    vertice_found = True
                    break
                vertices_counted += 1
            if vertice_found:
                break
        rtan = poly_coords[0][closest_feature][closest_vertex]
        ltan = poly_coords[0][closest_feature][closest_vertex]
        eprev = _is_left(
            poly_coords[0][0][0],
            poly_coords[0][0][len(poly_coords[0][0]) - 1],
            point_coords,
        )
        for ring in poly_coords:
            out = process_polygon(ring[0], point_coords, eprev, enext, rtan, ltan)
            rtan = out[0]
            ltan = out[1]
    return FeatureCollection(
        [Feature(geometry=Point(rtan)), Feature(geometry=Point(ltan))])
def test_points_within_polygon():
    f1 = Feature(geometry=Point((-46.6318, -23.5523)))
    f2 = Feature(geometry=Point((-46.6246, -23.5325)))
    f3 = Feature(geometry=Point((-46.6062, -23.5513)))
    f4 = Feature(geometry=Point((-46.663, -23.554)))
    f5 = Feature(geometry=Point((-46.643, -23.557)))
    f6 = Feature(geometry=Point((-73, 45)))
    f7 = Feature(geometry=Point((36, 71)))
    points = FeatureCollection([f1, f2, f3, f4, f5, f6, f7])
    poly = Polygon([[
        (-46.653, -23.543),
        (-46.634, -23.5346),
        (-46.613, -23.543),
        (-46.614, -23.559),
        (-46.631, -23.567),
        (-46.653, -23.560),
        (-46.653, -23.543),
    ]])
    fpoly = Feature(geometry=poly)
    poly2 = Polygon([[
        (-76.653, -11.543),
        (-46.634, -23.5346),
        (-46.613, -23.543),
        (-46.614, -23.559),
        (-46.631, -23.567),
        (-46.653, -23.560),
        (-76.653, -11.543),
    ]])
    fpoly2 = Feature(geometry=poly2)
    fc = FeatureCollection([fpoly, fpoly2])
    result = points_within_polygon(points, fc)
    assert result == {
        "features": [
            {
                "geometry": {"coordinates": [-46.6318, -23.5523], "type": "Point"},
                "properties": {},
                "type": "Feature",
            },
            {
                "geometry": {"coordinates": [-46.643, -23.557], "type": "Point"},
                "properties": {},
                "type": "Feature",
            },
        ],
        "type": "FeatureCollection",
    }
    multi_polygon = Feature(geometry=MultiPolygon([
        ([(-81, 41), (-81, 47), (-72, 47), (-72, 41), (-81, 41)],),
        ([(3.78, 9.28), (-130.91, 1.52), (35.12, 72.234), (3.78, 9.28)],),
    ]))
    result2 = points_within_polygon(f6, multi_polygon)
    assert result2 == {
        "features": [{
            "geometry": {"coordinates": [-73, 45], "type": "Point"},
            "properties": {},
            "type": "Feature",
        }],
        "type": "FeatureCollection",
    }
def point_on_feature(geojson) -> Feature:
    """
    Takes a Feature or FeatureCollection and returns a Point guaranteed
    to be on the surface of the feature.

    :param geojson: Feature or FeatureCollection on which the Point is to be found.
    :return: A Point Feature that lies on the provided feature.

    Example:

    >>> from turfpy.measurement import point_on_feature
    >>> from geojson import Polygon, Feature
    >>> polygon = Polygon([((116, -36), (131, -32), (146, -43), (155, -25),
    ...                     (133, -9), (111, -22), (116, -36))])
    >>> feature = Feature(geometry=polygon)
    >>> point_on_feature(feature)
    """
    fc = _normalize(geojson)
    cent = centroid(fc)
    on_surface = False
    i = 0
    while not on_surface and i < len(fc["features"]):
        on_line = False
        geom = fc["features"][i]["geometry"]
        if geom["type"] == "Point":
            if (cent["geometry"]["coordinates"][0] == geom["coordinates"][0]
                    and cent["geometry"]["coordinates"][1] == geom["coordinates"][1]):
                on_surface = True
        elif geom["type"] == "MultiPoint":
            on_multi_point = False
            k = 0
            while not on_multi_point and k < len(geom["coordinates"]):
                if (cent["geometry"]["coordinates"][0] == geom["coordinates"][k][0]
                        and cent["geometry"]["coordinates"][1]
                        == geom["coordinates"][k][1]):
                    on_surface = True
                    on_multi_point = True
                k += 1
        elif geom["type"] == "LineString":
            k = 0
            while not on_line and k < len(geom["coordinates"]) - 1:
                x = cent["geometry"]["coordinates"][0]
                y = cent["geometry"]["coordinates"][1]
                x1 = geom["coordinates"][k][0]
                y1 = geom["coordinates"][k][1]
                x2 = geom["coordinates"][k + 1][0]
                y2 = geom["coordinates"][k + 1][1]
                if _point_on_segment(x, y, x1, y1, x2, y2):
                    on_line = True
                    on_surface = True
                k += 1
        elif geom["type"] == "MultiLineString":
            j = 0
            while j < len(geom["coordinates"]):
                on_line = False
                k = 0
                line = geom["coordinates"][j]
                while not on_line and k < len(line) - 1:
                    x = cent["geometry"]["coordinates"][0]
                    y = cent["geometry"]["coordinates"][1]
                    x1 = line[k][0]
                    y1 = line[k][1]
                    x2 = line[k + 1][0]
                    y2 = line[k + 1][1]
                    if _point_on_segment(x, y, x1, y1, x2, y2):
                        on_line = True
                        on_surface = True
                    k += 1
                j += 1
        elif geom["type"] == "Polygon" or geom["type"] == "MultiPolygon":
            if boolean_point_in_polygon(cent, geom):
                on_surface = True
        i += 1
    if on_surface:
        return cent
    else:
        vertices_list = []
        for i in range(0, len(fc["features"])):
            vertices_list.extend(explode(fc["features"][i])["features"])
        vertices = FeatureCollection(vertices_list)
        point = Point(nearest_point(cent, vertices)["geometry"]["coordinates"])
        return Feature(geometry=point)
# 3. Read the source CSV and rewrite it into the first CSV
with open('SOURCE/20200609184622.csv') as readerCSV:
    rowCSV = csv.DictReader(readerCSV)
    writer = csv.DictWriter(writerCSV, fieldnames=rowCSV.fieldnames)
    writer.writeheader()
    for item in rowCSV:
        item['Longitude'] = item['Longitude'].replace("West:", "-")
        item['Latitude'] = item['Latitude'].replace("South:", "-")
        writer.writerow(item)

features = []
with open('CSV/{}.csv'.format(nameCSV)) as csvFile:
    rowCSV = csv.DictReader(csvFile)
    for item in rowCSV:
        latitude, longitude = map(float, (item['Latitude'], item['Longitude']))
        features.append(
            Feature(geometry=Point((longitude, latitude)),
                    properties={
                        # the first header carries a UTF-8 BOM, hence the prefix
                        'N': item['\xef\xbb\xbfNo.'],
                        'Device Name': item['Device Name'],
                        'Receiving Time': item['Receiving Time'],
                        'GPS Time': item['GPS Time'],
                        'Speed': item['Speed'],
                        'Direction': item['Direction'],
                        'BaseStation ID': item['BaseStation ID'],
                        'Channel ID': item['Channel ID']
                    }))

collection = FeatureCollection(features)
with open('CSV/{}.json'.format(nameCSV), "w") as f:
    f.write('%s' % collection)
def get_labs(format):
    """Gets Fab Lab data from fablabs.io."""
    fablabs_json = data_from_fablabs_io(fablabs_io_labs_api_url_v0)
    fablabs = {}

    # Load all the FabLabs
    for i in fablabs_json["labs"]:
        current_lab = FabLab()
        current_lab.address_1 = i["address_1"]
        current_lab.address_2 = i["address_2"]
        current_lab.address_notes = i["address_notes"]
        current_lab.avatar = i["avatar"]
        current_lab.blurb = i["blurb"]
        current_lab.capabilities = i["capabilities"]
        current_lab.city = i["city"]
        current_lab.country_code = i["country_code"]
        current_lab.county = i["county"]
        current_lab.description = i["description"]
        current_lab.email = i["email"]
        current_lab.header_image_src = i["header_image_src"]
        current_lab.id = i["id"]
        current_lab.kind_name = i["kind_name"]
        # Some labs do not have coordinates
        if i["latitude"] is None or i["longitude"] is None:
            address = i["address_1"] + i["city"] + i["country_code"]
            try:
                location = geolocator.geocode(address)
                current_lab.latitude = location.latitude
                current_lab.longitude = location.longitude
            except Exception:
                try:
                    location = geolocator.geocode(i["city"])
                    current_lab.latitude = location.latitude
                    current_lab.longitude = location.longitude
                except Exception:
                    # For labs without a city, add 0,0 as coordinates
                    current_lab.latitude = 0.0
                    current_lab.longitude = 0.0
        else:
            current_lab.latitude = i["latitude"]
            current_lab.longitude = i["longitude"]
        current_lab.links = i["links"]
        current_lab.name = i["name"]
        current_lab.parent_id = i["parent_id"]
        current_lab.phone = i["phone"]
        current_lab.postal_code = i["postal_code"]
        current_lab.slug = i["slug"]
        current_lab.url = i["url"]
        # Add the lab
        fablabs[i["slug"]] = current_lab

    # Return a dictionary / json
    if format.lower() == "dict" or format.lower() == "json":
        output = {}
        for j in fablabs:
            output[j] = fablabs[j].__dict__
    # Return a geojson
    elif format.lower() == "geojson" or format.lower() == "geo":
        labs_list = []
        for l in fablabs:
            single = fablabs[l].__dict__
            # GeoJSON positions are (longitude, latitude)
            single_lab = Feature(type="Feature",
                                 geometry=Point((single["longitude"],
                                                 single["latitude"])),
                                 properties=single)
            labs_list.append(single_lab)
        output = dumps(FeatureCollection(labs_list))
    # Return an object
    elif format.lower() == "object" or format.lower() == "obj":
        output = fablabs
    # Default: return an object
    else:
        output = fablabs

    return output
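# Usage sketch for get_labs above; a hedged example, not part of the original
# module. It assumes fablabs.io is reachable and that the module-level names the
# function uses (data_from_fablabs_io, FabLab, geolocator, dumps) are defined:
if __name__ == "__main__":
    labs_geojson_str = get_labs(format="geojson")  # JSON string of a FeatureCollection
    with open("fablabs.geojson", "w") as fh:
        fh.write(labs_geojson_str)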
def osmproxy(request):
    url = request.params.get("url")
    if url is None:
        return HTTPBadRequest()

    # instantiate the parser and start parsing
    parser = RelationParser()
    p = OSMParser(concurrency=1,
                  coords_callback=parser.get_coords,
                  relations_callback=parser.get_relations,
                  ways_callback=parser.get_ways)
    temp = tempfile.NamedTemporaryFile(suffix='.osm')
    urllib.urlretrieve(url, temp.name)
    p.parse(temp.name)
    temp.close()

    polygons = []
    r = parser.relation
    # first check for self-closing ways (iterate backwards so popping is safe;
    # include index 0)
    for i in range(len(r) - 1, -1, -1):
        w = parser.ways[r[i]]
        if w[len(w) - 1] == w[0]:
            r.pop(i)
            polygon = Polygon([parser.nodes[node] for node in w])
            polygons.append(polygon)

    if len(r) > 0:
        prev = parser.ways[r[0]]
        ordered_ways = []
        ordered_ways.append(prev)
        r.pop(0)
        # assumes the remaining ways chain into a closed ring
        while len(r):
            match = False
            for i in range(0, len(r)):
                w = parser.ways[r[i]]
                # first node of the next way matches the last of the previous one
                if w[0] == prev[len(prev) - 1]:
                    match = w
                # or maybe the way has to be reversed
                elif w[len(w) - 1] == prev[len(prev) - 1]:
                    match = w[::-1]
                if match:
                    prev = match
                    ordered_ways.append(match)
                    r.pop(i)
                    break
        if len(ordered_ways) > 0:
            # now that ways are correctly ordered, we can create a unique geometry
            nodes = []
            for way in ordered_ways:
                for node in way:
                    nodes.append(parser.nodes[node])
            # make sure that first and last node are similar
            if nodes[0] != nodes[len(nodes) - 1]:
                raise ValueError("first and last nodes of the ring do not match")
            # create a shapely polygon with the nodes
            polygons.append(Polygon(nodes))

    multipolygon = MultiPolygon(polygons)
    return FeatureCollection([Feature(geometry=multipolygon)])
        'end_point': end_point
    }
    return out


# random coords
lons = np.random.uniform(16.22, 16.53, 2).round(6)
lats = np.random.uniform(48.14, 48.28, 2).round(6)

start_point = nearest_road_point(lons[0], lats[0], 2020)
end_point = nearest_road_point(lons[1], lats[1], 2020)

features2 = []
for year in range(2014, 2021):
    req = get_route(start_point[0], start_point[1],
                    end_point[0], end_point[1], year)
    route_json = req['geometry']
    route_time = req['duration']
    route_dist = req['distance']
    features2.append(
        Feature(geometry=route_json,
                properties={
                    "year": year,
                    "duration": route_time,
                    "distance": route_dist
                }))

feature_collection2 = FeatureCollection(features2)
with open('id_fastest.geojson', 'w') as file:
    dump(feature_collection2, file)
def any_vector_to_fc(
    vector: Union[Dict, Feature, FeatureCollection, List, GeoDataFrame, Polygon,
                  Point],
    as_dataframe: bool = False,
) -> Union[Dict, GeoDataFrame]:
    """
    Gets a uniform feature collection dictionary (with fc and f bboxes) from any
    input vector type.

    Args:
        vector: One of Dict, FeatureCollection, Feature, List of bounds
            coordinates, GeoDataFrame, shapely.geometry.Polygon,
            shapely.geometry.Point. All assume EPSG 4326 and Polygons!
        as_dataframe: GeoDataFrame output with as_dataframe=True.
    """
    if not isinstance(
        vector,
        (
            dict,
            FeatureCollection,
            Feature,
            geojson_Polygon,
            list,
            GeoDataFrame,
            Polygon,
            Point,
        ),
    ):
        raise ValueError(
            "The provided geometry must be a FeatureCollection, Feature, Dict, "
            "geopandas Dataframe, shapely Polygon, shapely Point or a list of 4 "
            "bounds coordinates."
        )

    # Transform all possible input geometries to a uniform feature collection.
    vector = copy.deepcopy(vector)  # otherwise changes input geometry.
    if isinstance(vector, (dict, FeatureCollection, Feature)):
        try:
            if vector["type"] == "FeatureCollection":
                df = GeoDataFrame.from_features(vector, crs=4326)
            elif vector["type"] == "Feature":
                # TODO: Handle point features?
                df = GeoDataFrame.from_features(FeatureCollection([vector]),
                                                crs=4326)
            elif vector["type"] == "Polygon":  # Geojson geometry
                df = GeoDataFrame.from_features(
                    FeatureCollection([Feature(geometry=vector)]), crs=4326)
        except KeyError as e:
            raise ValueError(
                "Provided geometry dictionary has to include a featurecollection "
                "or feature."
            ) from e
    else:
        if isinstance(vector, list):
            if len(vector) == 4:
                box_poly = shapely.geometry.box(*vector)
                df = GeoDataFrame({"geometry": [box_poly]}, crs=4326)
            else:
                raise ValueError("The list requires 4 bounds coordinates.")
        elif isinstance(vector, Polygon):
            df = GeoDataFrame({"geometry": [vector]}, crs=4326)
        elif isinstance(vector, Point):
            df = GeoDataFrame(
                {"geometry": [vector.buffer(0.00001)]}, crs=4326
            )  # Around 1m buffer # TODO: Find better solution than small buffer?
        elif isinstance(vector, GeoDataFrame):
            df = vector
            try:
                if df.crs.to_string() != "EPSG:4326":
                    df = df.to_crs(epsg=4326)
            except AttributeError as e:
                raise AttributeError("GeoDataFrame requires a crs.") from e

    if as_dataframe:
        return df
    else:
        fc = df.__geo_interface__
        return fc
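# Minimal usage sketch for any_vector_to_fc, assuming geopandas and shapely are
# installed as the function above requires. A 4-element bounds list
# [minx, miny, maxx, maxy] becomes a one-feature collection in EPSG:4326:
bounds = [12.0, 48.0, 13.0, 49.0]
fc = any_vector_to_fc(bounds)
assert fc["type"] == "FeatureCollection"
gdf = any_vector_to_fc(bounds, as_dataframe=True)  # same data as a GeoDataFrame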
def result_to_geojson(features, fname):
    collection = FeatureCollection(features)
    with open(fname, "w") as f:
        f.write('%s' % collection)
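# Usage sketch for result_to_geojson (assumes the geojson package, as above);
# writes a one-point collection to disk:
from geojson import Feature, Point

demo_features = [Feature(geometry=Point((13.4, 52.5)), properties={"name": "Berlin"})]
result_to_geojson(demo_features, "demo.geojson")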
def fulltextsearch(self):
    lang = locale_negotiator(self.request)

    try:
        language = self.languages[lang]
    except KeyError:
        return HTTPInternalServerError(
            detail="{0!s} not defined in languages".format(lang))

    if "query" not in self.request.params:
        return HTTPBadRequest(detail="no query")
    terms = self.request.params.get("query")

    maxlimit = self.settings.get("maxlimit", 200)

    try:
        limit = int(
            self.request.params.get("limit",
                                    self.settings.get("defaultlimit", 30)))
    except ValueError:
        return HTTPBadRequest(detail="limit value is incorrect")
    if limit > maxlimit:
        limit = maxlimit

    try:
        partitionlimit = int(self.request.params.get("partitionlimit", 0))
    except ValueError:
        return HTTPBadRequest(detail="partitionlimit value is incorrect")
    if partitionlimit > maxlimit:
        partitionlimit = maxlimit

    terms_ts = "&".join(
        w + ":*" for w in IGNORED_CHARS_RE.sub(" ", terms).split(" ") if w != "")
    _filter = FullTextSearch.ts.op("@@")(func.to_tsquery(language, terms_ts))

    if self.request.user is None:
        _filter = and_(_filter, FullTextSearch.public.is_(True))
    else:
        _filter = and_(
            _filter,
            or_(
                FullTextSearch.public.is_(True),
                FullTextSearch.role_id.is_(None),
                FullTextSearch.role_id.in_(
                    [r.id for r in self.request.user.roles]),
            ),
        )

    if "interface" in self.request.params:
        _filter = and_(
            _filter,
            or_(
                FullTextSearch.interface_id.is_(None),
                FullTextSearch.interface_id == self._get_interface_id(
                    self.request.params["interface"]),
            ),
        )
    else:
        _filter = and_(_filter, FullTextSearch.interface_id.is_(None))

    _filter = and_(
        _filter,
        or_(FullTextSearch.lang.is_(None), FullTextSearch.lang == lang))

    rank_system = self.request.params.get("ranksystem")
    if rank_system == "ts_rank_cd":
        # The numbers used in ts_rank_cd() below indicate a normalization method.
        # Several normalization methods can be combined using |.
        # 2 divides the rank by the document length
        # 8 divides the rank by the number of unique words in document
        # By combining them, shorter results seem to be preferred over longer ones
        # with the same ratio of matching words. But this relies only on testing it
        # and on some assumptions about how it might be calculated
        # (the normalization is applied two times with the combination of 2 and 8,
        # so the effect on at least the one-word-results is therefore stronger).
        rank = func.ts_rank_cd(FullTextSearch.ts,
                               func.to_tsquery(language, terms_ts), 2 | 8)
    else:
        # Use similarity ranking system from module pg_trgm.
        rank = func.similarity(FullTextSearch.label, terms)

    if partitionlimit:
        # Here we want to partition the search results based on
        # layer_name and limit each partition.
        row_number = (func.row_number().over(
            partition_by=FullTextSearch.layer_name,
            order_by=(desc(rank), FullTextSearch.label)).label("row_number"))
        subq = DBSession.query(FullTextSearch).add_columns(
            row_number).filter(_filter).subquery()
        query = DBSession.query(subq.c.id, subq.c.label, subq.c.params,
                                subq.c.layer_name, subq.c.the_geom,
                                subq.c.actions)
        query = query.filter(subq.c.row_number <= partitionlimit)
    else:
        query = DBSession.query(FullTextSearch).filter(_filter)
        query = query.order_by(desc(rank))
        query = query.order_by(FullTextSearch.label)

    query = query.limit(limit)
    objs = query.all()

    features = []
    for o in objs:
        properties = {"label": o.label}
        if o.layer_name is not None:
            properties["layer_name"] = o.layer_name
        if o.params is not None:
            properties["params"] = o.params
        if o.actions is not None:
            properties["actions"] = o.actions
        if o.actions is None and o.layer_name is not None:
            properties["actions"] = [{
                "action": "add_layer",
                "data": o.layer_name
            }]

        if o.the_geom is not None:
            geom = to_shape(o.the_geom)
            feature = Feature(id=o.id,
                              geometry=geom,
                              properties=properties,
                              bbox=geom.bounds)
            features.append(feature)
        else:
            feature = Feature(id=o.id, properties=properties)
            features.append(feature)

    return FeatureCollection(features)
def label_to_geojson(img_label, label, simplify_tol=1.5):
    """
    Function reading a label image, and returning a feature collection
    following the geojson standard.

    Args:
        img_label (numpy array): numpy data, with each object being assigned
            with a unique uint number
        label (str): like 'cell', 'nuclei'
        simplify_tol (float): give a higher number if you want less coordinates.
    """
    # for img_label, for cells on border, make sure on border pixels are set to 0
    shape_x, shape_y = img_label.shape
    shape_x, shape_y = shape_x - 1, shape_y - 1
    img_label[0, :] = img_label[:, 0] = img_label[shape_x, :] = \
        img_label[:, shape_y] = 0

    features = []
    # Get all object ids, remove 0 since this is background
    ind_objs = np.unique(img_label)
    ind_objs = np.delete(ind_objs, np.where(ind_objs == 0))
    for obj_int in np.nditer(ind_objs, flags=["zerosize_ok"]):
        # Create binary label for current object and find contour
        img_label_loop = np.zeros((img_label.shape[0], img_label.shape[1]))
        img_label_loop[img_label == obj_int] = 1
        contours_find = measure.find_contours(img_label_loop, 0.5)
        if len(contours_find) == 1:
            index = 0
        else:
            # keep the longest contour if several were found
            pixels = []
            for _, item in enumerate(contours_find):
                pixels.append(len(item))
            index = np.argmax(pixels)
        contour = contours_find[index]

        # swap (row, col) to (x, y) and flip the y axis
        contour_as_numpy = contour[:, np.argsort([1, 0])].astype('uint16')
        contour_as_numpy[:, 1] = np.array(
            [img_label.shape[0] - h[0] for h in contour])
        contour_as_list = contour_as_numpy.tolist()

        if simplify_tol is not None:
            poly_shapely = shapely_polygon(contour_as_list)
            poly_shapely_simple = poly_shapely.simplify(
                simplify_tol, preserve_topology=False)
            contour_as_list = list(poly_shapely_simple.exterior.coords)

        # Create and append feature for geojson
        pol_loop = geojson_polygon([contour_as_list])
        full_label = label + "_idx"
        index_number = int(obj_int - 1)
        features.append(
            Feature(geometry=pol_loop,
                    properties={full_label: index_number, "label": label}))

    feature_collection = FeatureCollection(
        features, bbox=[0, 0, img_label.shape[1] - 1, img_label.shape[0] - 1])
    return feature_collection, features
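# Usage sketch for label_to_geojson, assuming numpy, scikit-image (measure) and
# the shapely_polygon/geojson_polygon aliases used above are in scope. A tiny
# synthetic label image with one square object:
import numpy as np

demo_label = np.zeros((16, 16), dtype=np.uint16)
demo_label[4:10, 4:10] = 1  # one object with id 1
demo_fc, demo_feats = label_to_geojson(demo_label, label="cell", simplify_tol=None)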
def analyze_nest_data(config):
    """ Analyze nest data """

    def _city_progress(count, total, status=""):
        status = "[{}] {}".format(config["area_name"], status)
        progress(count, total, status)

    start_time = time.time()
    osm_file_name = FILENAME.format(area=config['area_name'],
                                    date=config['osm_date'])
    try:
        with io.open(osm_file_name, mode='r',
                     encoding=config["encoding"]) as osm_file:
            print("OSM Data file found, we will use this! :D")
            nest_json = json.loads(osm_file.read())
    except IOError:
        print("No OSM Data file found, will get the data now.\n")
        nest_url = osm_uri(
            config['p1_lat'],
            config['p1_lon'],
            config['p2_lat'],
            config['p2_lon'],
            config['osm_date'],
            relations=config['analyze_multipolygons'],
        )
        print("{} Overpass url:".format(config["area_name"]))
        print(nest_url)
        print("Getting OSM Data...")
        osm_session = requests.Session()
        response = osm_session.get(nest_url)
        response.raise_for_status()
        nest_json = response.json()
        print(nest_json)
        if not nest_json["elements"]:
            print("\nDid not get any Data from the API:")
            if "remark" in nest_json:
                print(nest_json["remark"])
            return
        with io.open(osm_file_name, mode='w',
                     encoding=config["encoding"]) as osm_file:
            osm_file.write(response.text)
            print("OSM Data received and is saved in OSM Data file")

    if not nest_json:
        print("Error getting osm data from file")
        print(nest_json)
        return
    print("Getting OSM Data...Complete (took {:.2f} seconds)".format(
        time.time() - start_time))

    # Read the Area Data File
    area_file_name = PARKNAME_FILE.format(area=config['area_name'])
    area_file_data = dict()
    try:
        with io.open(area_file_name, mode='r',
                     encoding=config["encoding"]) as area_file:
            print("Area Data file found, we will use this! :D")
            dict_reader = csv.DictReader(
                area_file,
                quotechar='"',
                quoting=csv.QUOTE_MINIMAL,
            )
            for line in dict_reader:
                area_file_data[line["osm_id"]] = {
                    "name": line["name"],
                    "center_lat": line["center_lat"],
                    "center_lon": line["center_lon"],
                }
    except FileNotFoundError:
        print("No Area Data file found, we will create it at the end\n")

    # Get Event Data
    event_pokes = set(config['event_poke'])
    if config['event_automation']:
        print("Event-Automation active, checking for active events")
        serebii = SerebiiPokemonGo()
        active_events = serebii.get_active_events()
        event_pokes = set()
        if active_events:
            print("Active Events found:")
            print(active_events)
            for event in active_events:
                event_pokes.update(event.pokemon)
        else:
            print("Currently no active Event found, "
                  "no event pokemon will be used")
    if NEST_SPECIES_LIST:
        nest_mons = set(NEST_SPECIES_LIST) - event_pokes
    else:
        nest_mons = set()

    print("##" * 20)

    nodes = dict()
    ways = dict()
    relations = dict()
    for element in nest_json['elements']:
        if "type" not in element:
            continue
        if element["type"] == "node":
            nodes[element["id"]] = {
                "lat": element["lat"],
                "lon": element["lon"]
            }
        elif element["type"] == "way":
            # skip ways without nodes
            if "nodes" not in element or not element["nodes"]:
                continue
            ways[element["id"]] = element
        elif element["type"] == "relation":
            # skip relations without members
            if "members" not in element or not element["members"]:
                continue
            relations[element["id"]] = element

    print("Initialize/Start DB Session")
    mydb_r = connect(host=config['db_r_host'],
                     user=config['db_r_user'],
                     passwd=config['db_r_pass'],
                     database=config['db_r_name'],
                     port=config['db_r_port'],
                     charset=config['db_r_charset'],
                     autocommit=True)
    mydb_w = connect(host=config['db_w_host'],
                     user=config['db_w_user'],
                     passwd=config['db_w_pass'],
                     database=config['db_w_name'],
                     port=config['db_w_port'],
                     charset=config['db_w_charset'],
                     autocommit=True)
    mycursor_r = mydb_r.cursor()
    mycursor_w = mydb_w.cursor()
    print("Connection clear")

    # Delete old Nest data
    if config['delete_old']:
        print("Delete Old Nests")
        mycursor_w.execute(
            NEST_DELETE_QUERY.format(db_name=config['db_w_name'],
                                     db_nests=config['db_nest']))
        print("Delete Old Nests - Complete")
    print("Start Analyzing Nests")

    # Check Relations
    def _convert_way(way):
        area_points = list()
        for point in way["nodes"]:
            point_coords = nodes[point]
            area_points.append([point_coords['lon'], point_coords['lat']])
        if len(area_points) < 3:
            return None
        return geometry.Polygon(area_points)

    with open(POKE_NAMES_FILE) as pk_names_file:
        poke_names = json.load(pk_names_file)
    with open(config['dc-locale-file']) as loc_file:
        locale = json.load(loc_file)

    areas = dict()
    areas_basic = dict()
    relations_len = len(relations)
    for (idx, (_id, relation)) in enumerate(relations.items(), start=1):
        relation_name = config['default_park_name']
        if str(_id) in area_file_data:
            print("ID Found in Area File, will use data from area file")
            relation_name = area_file_data[str(_id)]["name"]
        elif "tags" in relation and "name" in relation["tags"]:
            relation_name = relation["tags"]["name"]
        _city_progress(
            idx, relations_len,
            "({}/{}) {}".format(idx, relations_len,
                                "Starting to analyze Nest - Check Relations"))
        inner_members = list()
        outer_members = list()
        for member in relation["members"]:
            role = member["role"]
            if member["type"] == "node":
                # this means, this is just a single poi inside the relation
                continue
            way = ways.pop(member["ref"], None)
            if way is None:
                continue
            way_poly = _convert_way(way)
            if way_poly is None:
                continue
            if role == "inner":
                inner_members.append(way_poly)
            else:
                # role == "outer" or no inner/outer infos are given
                outer_members.append(way_poly)
        outer_polygon = geometry.MultiPolygon(outer_members).buffer(0)
        inner_polygon = geometry.MultiPolygon(inner_members).buffer(0)
        final_polygon = None
        if outer_polygon and inner_polygon:
            final_polygon = outer_polygon.symmetric_difference(
                inner_polygon).difference(inner_polygon)
        elif outer_polygon:
            final_polygon = outer_polygon
        elif inner_polygon:
            final_polygon = inner_polygon
        area_shapeley_poly = final_polygon.convex_hull

        if str(_id) in area_file_data:
            center_lat = float(area_file_data[str(_id)]["center_lat"])
            center_lon = float(area_file_data[str(_id)]["center_lon"])
            area_center_point = geometry.Point(center_lat, center_lon)
        else:
            area_center_point = area_shapeley_poly.centroid

        if not area_shapeley_poly.bounds:
            continue
        min_lon, min_lat, max_lon, max_lat = area_shapeley_poly.bounds

        area_poly_props = {
            "name": relation_name,
            "stroke": config["json-stroke"],
            "stroke-width": config['json-stroke-width'],
            "stroke-opacity": config['json-stroke-opacity'],
            "fill": config['json-fill'],
            "fill-opacity": config['json-fill-opacity'],
            "area_center_point": area_center_point,
            "min_lon": min_lon,
            "min_lat": min_lat,
            "max_lon": max_lon,
            "max_lat": max_lat,
        }
        feat = Feature(geometry=final_polygon,
                       id=_id,
                       properties=area_poly_props)
        areas[_id] = feat

    # Check Ways
    all_areas = list()
    failed_nests = defaultdict(int)
    ways_len = len(ways)
    for (idx, (_id, way)) in enumerate(ways.items(), start=1):
        way_name = config['default_park_name']
        if str(_id) in area_file_data:
            way_name = area_file_data[str(_id)]["name"]
        elif "tags" in way and "name" in way["tags"]:
            way_name = way["tags"]["name"]
        _city_progress(
            idx, ways_len,
            "({}/{}) {}".format(idx, ways_len,
                                "Starting to analyze Nest - Check Ways"))
        way_points = list()
        for point in way['nodes']:
            point_coords = nodes[point]
            way_points.append([point_coords['lon'], point_coords['lat']])
        if len(way_points) < 3:
            continue
        way_poly = geometry.Polygon(way_points)
        way_shapeley_poly = way_poly.convex_hull
        if str(_id) in area_file_data:
            center_lat = float(area_file_data[str(_id)]["center_lat"])
            center_lon = float(area_file_data[str(_id)]["center_lon"])
            way_center_point = geometry.Point(center_lat, center_lon)
        else:
            way_center_point = way_shapeley_poly.centroid
        min_lon, min_lat, max_lon, max_lat = way_shapeley_poly.bounds
        way_poly_props = {
            "name": way_name,
            "stroke": config["json-stroke"],
            "stroke-width": config['json-stroke-width'],
            "stroke-opacity": config['json-stroke-opacity'],
            "fill": config['json-fill'],
            "fill-opacity": config['json-fill-opacity'],
            "area_center_point": way_center_point,
            "min_lon": min_lon,
            "min_lat": min_lat,
            "max_lon": max_lon,
            "max_lat": max_lat,
        }
        feat = Feature(geometry=way_poly, id=_id, properties=way_poly_props)
        areas[_id] = feat

    # NOW CHECK ALL AREAS ONE AFTER ANOTHER
    areas_len = len(areas)
    for (idx, (_id, area)) in enumerate(areas.items(), start=1):
        area_points = area["geometry"]
        area_prop = area["properties"]
        area_center_point = area_prop["area_center_point"]
        min_lon = area_prop["min_lon"]
        min_lat = area_prop["min_lat"]
        max_lon = area_prop["max_lon"]
        max_lat = area_prop["max_lat"]

        area_pokestops = dict()
        if config['pokestop_pokemon']:
            # Get all Pokestops with id, lat and lon
            _city_progress(
                idx, areas_len,
                "({}/{}) {}".format(idx, areas_len,
                                    "Get all Pokestops within min/max lat/lon"))
            pokestop_sel_query = POKESTOP_SELECT_QUERY.format(
                db_name=config['db_r_name'],
                db_pokestop=config['db_pokestop'],
                min_lat=min_lat,
                max_lat=max_lat,
                min_lon=min_lon,
                max_lon=max_lon)
            mycursor_r.execute(pokestop_sel_query)
            myresult_pokestops = mycursor_r.fetchall()
            _city_progress(
                idx, areas_len,
                "({}/{}) {}".format(idx, areas_len,
                                    "Got all wanted Pokestops - now filter them"))
            for pkstp in myresult_pokestops:
                pkst_point = geometry.Point(pkstp[2], pkstp[1])
                if pkst_point.within(geometry.shape(area_points)):
                    area_pokestops[pkstp[0]] = pkst_point
            _city_progress(
                idx, areas_len,
                "({}/{}) {}".format(idx, areas_len,
                                    "Filtering of all Pokestops complete"))

        area_spawnpoints = dict()
        _city_progress(
            idx, areas_len,
            "({}/{}) {}".format(idx, areas_len,
                                "Get all Spawnpoints within min/max lat/lon"))
        # Get all Spawnpoints with id, lat and lon
        spawnpoint_sel_query = SPAWNPOINT_SELECT_QUERY.format(
            db_name=config['db_r_name'],
            db_spawnpoint=config['db_spawnpoint'],
            sp_id=config['db_spawnpoint_id'],
            lat=config['db_spawnpoint_lat'],
            lon=config['db_spawnpoint_lon'],
            min_lat=min_lat,
            max_lat=max_lat,
            min_lon=min_lon,
            max_lon=max_lon)
        mycursor_r.execute(spawnpoint_sel_query)
        my_result_spawnpoints = mycursor_r.fetchall()
        _city_progress(
            idx, areas_len,
            "({}/{}) {}".format(idx, areas_len,
                                "Got all wanted Spawnpoints - now filter them"))
        for spwn in my_result_spawnpoints:
            spwn_point = geometry.Point(spwn[2], spwn[1])
            if spwn_point.within(geometry.shape(area_points)):
                area_spawnpoints[spwn[0]] = spwn_point
        _city_progress(
            idx, areas_len,
            "({}/{}) {}".format(idx, areas_len,
                                "Filtering of all Spawnpoints complete"))

        if not area_pokestops and not area_spawnpoints:
            failed_nests["Park has no Stops and no Spawnpoints, ignore it"] += 1
            continue
        if (len(area_pokestops) < 1) and (len(area_spawnpoints) <
                                          config['min_spawn']):
            failed_nests["Park has not enough Spawnpoints, ignore it"] += 1
            continue
        spawnpoint_in = "'{}'".format("','".join(
            str(nr) for nr in area_spawnpoints))
        pokestop_in = "'{}'".format("','".join(
            str(nr) for nr in area_pokestops))

        # Use data since last change:
        reset_time = int(time.time()) - (config['timespan'] * 3600)
        # RDM uses pokestop_ids, MAD not
        if config['pokestop_pokemon']:
            _city_progress(
                idx, areas_len,
                "({}/{}) {}".format(
                    idx, areas_len,
                    "Get all Pokes from stops and spawnpoints within nest area"))
            nest_query = NEST_SELECT_QUERY_STOP
            if not config['use_unix_timestamp']:
                nest_query = NEST_SELECT_QUERY_STOP.replace(
                    "UNIX_TIMESTAMP({pokemon_timestamp})",
                    "{pokemon_timestamp}")
        else:
            _city_progress(
                idx, areas_len,
                "({}/{}) {}".format(
                    idx, areas_len,
                    "Get all Pokes from spawnpoints within nest area"))
            nest_query = NEST_SELECT_QUERY
            if not config['use_unix_timestamp']:
                nest_query = NEST_SELECT_QUERY.replace(
                    "UNIX_TIMESTAMP({pokemon_timestamp})",
                    "{pokemon_timestamp}")
        query = nest_query.format(
            db_name=config['db_r_name'],
            db_pokemon_table=config['db_pokemon'],
            pokemon_timestamp=config['db_pokemon_timestamp'],
            pokestop_in=pokestop_in,
            spawn_id=config['db_pokemon_spawnid'],
            spawnpoint_in=spawnpoint_in,
            nest_mons=str(tuple(nest_mons)),
            reset_time=str(reset_time))
        poke_id = 1
        poke_count = 1
        mycursor_r.execute(query)
        poke_data = mycursor_r.fetchone()
        if poke_data:
            poke_id, poke_count = map(int, poke_data)
        _city_progress(
            idx, areas_len,
            "({}/{}) {}".format(idx, areas_len,
                                "Got all Pokes from Nest area"))

        # (Area_poke/timespan)*(24/scan_hours)
        poke_avg = round((poke_count / float(config['timespan'])) *
                         (24.00 / float(config['scan_hours'])), 2)

        _city_progress(
            idx, areas_len,
            "({}/{}) {}".format(idx, areas_len, "Filter and insert Nests"))

        if poke_count < config['min_pokemon']:
            failed_nests["Not enough Pokes in this Area to specify a real Nest"] += 1
            continue
        if poke_avg < config['min_avg_pokemon']:
            failed_nests["Average lower than the min average in config"] += 1
            continue
        current_time = int(time.time())

        _city_progress(
            idx, areas_len,
            "({}/{}) {}".format(idx, areas_len,
                                "Found Probable Nest - insert it now in db"))
        # Insert Nest data to db
        insert_query = NEST_INSERT_QUERY.format(db_name=config['db_w_name'],
                                                db_nests=config['db_nest'])
        insert_args = {
            "nest_id": str(area['id']),
            "name": area["properties"]["name"],
            "lat": float(area_center_point.x),
            "lon": float(area_center_point.y),
            "pokemon_id": int(poke_id),
            "type": 0,
            "pokemon_count": float(poke_count),
            "pokemon_avg": float(poke_avg),
            "current_time": current_time
        }
        area_file_data[str(area['id'])] = {
            "name": area["properties"]["name"],
            "center_lat": float(area_center_point.x),
            "center_lon": float(area_center_point.y),
        }
        mycursor_w.execute(insert_query, insert_args)
        all_areas.append(area)
        insert_args["pokemon_name"] = poke_names[str(poke_id)][
            config["dc-language"]]
        insert_args["pokemon_type"] = poke_names[str(poke_id)]["type"]
        insert_args["pokemon_shiny"] = poke_names[str(poke_id)]["shiny"]
        areas_basic[str(area['id'])] = insert_args

    mydb_r.close()
    mydb_w.close()

    print("\nNest analyzing took {:.2f} minutes".format(
        (time.time() - start_time) / 60))
    if all_areas:
        print("All Nests Added ({}):\n############".format(len(all_areas)))
    else:
        print("No Nests Added")
    if failed_nests:
        print("No nest reasons:\n############")
        for (key, value) in failed_nests.items():
            print("{}: {}".format(key, value))
    else:
        print("No false positive Parks")

    def discord_webhook():
        """ Send nest data to discord. """
        # Sort basic areas
        sorted_basic_areas = OrderedDict(
            sorted(areas_basic.items(),
                   key=lambda kv: kv[1][config["dc-sort-by"]],
                   reverse=config["dc-sort-reverse"]))
        content = defaultdict(str)
        content_page = 0
        for b_area in sorted_basic_areas.values():
            if config['dc-ignore-unnamed'] and (
                    b_area["name"] == config["default_park_name"]):
                continue
            if float(b_area["pokemon_avg"]) < config["dc-min-spawns-for-post"]:
                continue
            nest_time = datetime.utcfromtimestamp(
                int(b_area["current_time"])).strftime('%Y-%m-%d %H:%M:%S')
            park_name = b_area["name"]
            g_map_ref = '<https://maps.google.com/maps?q={lon:.5f},{lat:.5f}>'.format(
                lat=b_area["lat"], lon=b_area["lon"])
            g_maps = "[Google Maps]({})".format(g_map_ref)
            park_name_g = u"[{name}]({map_ref})".format(name=park_name,
                                                        map_ref=g_map_ref)
            custom_map_link = '<{map_link}>'.format(
                map_link=config["dc-map-link"])
            custom_map_ref = custom_map_link.format(lat=b_area["lat"],
                                                    lon=b_area["lon"])
            m_maps = "[Map Link]({})".format(custom_map_ref)
            park_name_m = u"[{name}]({map_ref})".format(
                name=park_name, map_ref=custom_map_ref)
            poke_shiny = ""
            if b_area["pokemon_shiny"]:
                poke_shiny = locale["poke-shiny-emoji"] + " "
            # convert types:
            poke_type_emojis = list()
            for typ in b_area["pokemon_type"]:
                poke_type_emojis.append(locale["poke-type-emoji"][typ])
            text = (config["dc-text"] + u"\n").format(
                park_name=park_name,
                park_name_g=park_name_g,
                park_name_m=park_name_m,
                poke_id=b_area["pokemon_id"],
                poke_name=b_area["pokemon_name"],
                poke_shiny=poke_shiny,
                poke_avg=b_area["pokemon_avg"],
                poke_type="/".join(b_area["pokemon_type"]),
                poke_type_emoji="/".join(poke_type_emojis),
                time=nest_time,
                g_maps=g_maps,
                m_maps=m_maps)
            if len(content[content_page] + text) < DISCORD_MAX_MSG:
                content[content_page] += text
            else:
                content_page += 1
                content[content_page] += text

        def send_webhook(payload):
            """ Send payload to webhook. """
            webhooks = json.loads(config["dc-webhook"])
            if not isinstance(webhooks, list):
                webhooks = [webhooks]
            for webhook in webhooks:
                result = requests.post(
                    webhook,
                    data=json.dumps(payload),
                    headers={"Content-Type": "application/json"})
                if result.status_code > 300:
                    print("Error while sending Webhook")
                    print(result.text)
                time.sleep(DISCORD_RATE_LIMIT)

        # Send Title of Nest Data:
        nest_title = config["dc-title"].format(
            area_name=config["area_name"]) + "\n"
        nest_title += ("-" * len(nest_title))
        payload = {"username": config["dc-username"], "content": nest_title}
        send_webhook(payload)
        # Send Nest Data
        for cont in content.values():
            payload = {"username": config["dc-username"], "content": cont}
            send_webhook(payload)

    if config["dc-enabled"]:
        discord_webhook()

    if config['geojson_extend']:
        with open(config['save_path'], 'r') as old_file_:
            old_geojson = json.load(old_file_)
            all_areas += old_geojson['features']
            print('old areas added to the new ones')
    with open(config['save_path'], 'w') as file_:
        print('write geojson')
        file_.write(dumps(FeatureCollection(all_areas), indent=4))
        print("geoJSON saved successfully")

    with io.open(area_file_name, mode='w',
                 encoding=config["encoding"]) as area_file:
        print("writing area data file...")
        fieldnames = [u"name", u"center_lat", u"center_lon", u"osm_id"]
        dict_writer = csv.DictWriter(
            area_file,
            fieldnames=fieldnames,
            quotechar='"',
            quoting=csv.QUOTE_MINIMAL,
        )
        dict_writer.writeheader()
        # This ONLY WORKS on Python3 str, unicode with write
        for a_id, a_data in area_file_data.items():
            dict_writer.writerow({
                "osm_id": a_id,
                "name": u"" + a_data["name"],
                "center_lat": a_data["center_lat"],
                "center_lon": a_data["center_lon"],
            })
        print("area data file saved successfully")
import requests
import json
import geojson
from geojson import Feature, Point, FeatureCollection

# response = requests.get('https://kbus.doublemap.com/map/v2/buses')
# buses = json.loads(response.text)
buses = json.loads(
    '[{"id":11,"name":"59","lat":45.0709,"lon":-64.54762,"heading":328,'
    '"route":66,"lastStop":1748,"fields":{},"bus_type":"bus",'
    '"lastUpdate":1582415938}]'
)

outBuses = []
for b in buses:
    outBuses.append(Feature(geometry=Point((b.get('lon'), b.get('lat')))))

print(geojson.dumps(FeatureCollection(outBuses)))
def create_markers(data):
    features = []
    # for fire in fire_data:
    if data.user_lat:
        user_feature = Feature(geometry=Point((data.user_lng, data.user_lat)),
                               properties={
                                   "description": 'Current Location',
                                   "radius": data.user_radius,
                                   "id": 'user',
                                   "alert": None,
                                   "type": "text"
                               })
        features.append(user_feature)
    else:
        user_feature = Feature(geometry=Point(),
                               properties={
                                   "description": 'Current Location',
                                   "radius": data.user_radius,
                                   "id": 'user',
                                   "alert": None,
                                   "type": "text"
                               })
        features.append(user_feature)
    if data.fav1_lat:
        user_feature = Feature(geometry=Point((data.fav1_lng, data.fav1_lat)),
                               properties={
                                   "description": data.fav1_desc,
                                   "radius": data.fav1_radius,
                                   "id": 'fav1',
                                   "alert": None,
                                   "type": "text"
                               })
        features.append(user_feature)
    else:
        user_feature = Feature(geometry=Point(),
                               properties={
                                   "description": "fav1",
                                   "radius": data.fav1_radius,
                                   "id": 'fav1',
                                   "alert": None,
                                   "type": "hidden"
                               })
        features.append(user_feature)
    if data.fav2_lat:
        user_feature = Feature(geometry=Point((data.fav2_lng, data.fav2_lat)),
                               properties={
                                   "description": data.fav2_desc,
                                   "radius": data.fav2_radius,
                                   "id": 'fav2',
                                   "alert": None,
                                   "type": "text"
                               })
        features.append(user_feature)
    else:
        user_feature = Feature(geometry=Point(),
                               properties={
                                   "description": "fav2",
                                   "radius": data.fav2_radius,
                                   "id": 'fav2',
                                   "alert": None,
                                   "type": "hidden"
                               })
        features.append(user_feature)
    feature_collection = FeatureCollection(features)
    return feature_collection
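# Usage sketch for create_markers. The shape of `data` is inferred from the
# attribute accesses above; types.SimpleNamespace stands in for the real object:
from types import SimpleNamespace

demo_data = SimpleNamespace(
    user_lat=34.05, user_lng=-118.24, user_radius=10,
    fav1_lat=None, fav1_lng=None, fav1_desc=None, fav1_radius=5,
    fav2_lat=None, fav2_lng=None, fav2_desc=None, fav2_radius=5,
)
demo_fc = create_markers(demo_data)  # favourites fall back to empty Points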
def main():
    connection = ftp_connection(cfg.url, cfg.username, cfg.password)
    connection.cwd("FIRMS/c6/Global")
    root_path = os.path.dirname(os.path.abspath(__file__))
    date_list = get_last_n_dates(n=2)
    satellite_fname = "MODIS_C6_Global_MCD14DL_NRT_{0}.txt"
    outfile = open(cfg.outfile, mode="w")
    feature_collection = list()
    for day in date_list:
        julian_date = calendar2julian(day)
        satellite_file = satellite_fname.format(julian_date)
        if satellite_file not in connection.nlst():
            print("The file {0} does not exist!".format(satellite_file))
            continue
        downloaded_fname = os.path.join(root_path,
                                        "{0}.csv".format(julian_date))
        download_file(connection, satellite_file, downloaded_fname)
        downloaded_file = open(downloaded_fname, "r")
        reader = csv.DictReader(downloaded_file)
        for row in reader:
            # continue if the fire was recorded over 24 hours ago
            acquisition_date = datetime.strptime(
                "{0} {1}".format(row["acq_date"], row["acq_time"]),
                "%Y-%m-%d %H:%M")
            today = datetime.utcnow()
            record_age = (today - acquisition_date).total_seconds() / 3600.00
            if record_age > 24.00:
                continue
            else:
                row["record_age"] = int(record_age)
                row["acq_datetime"] = str(acquisition_date)
                point = Point((float(row["longitude"]),
                               float(row["latitude"])))
                feature = Feature(geometry=point)
                for key in row:
                    if key not in ["longitude", "latitude", "acq_date",
                                   "acq_time"]:
                        feature.properties[key] = row[key]
                feature_collection.append(feature)
        downloaded_file.close()
        os.remove(downloaded_fname)
    geojson_content = FeatureCollection(feature_collection)
    reference_date = datetime(1970, 1, 1, 0, 0)
    geojson_content.features = sorted(
        geojson_content.features,
        key=lambda x: datetime.strptime(x["properties"]["acq_datetime"],
                                        "%Y-%m-%d %H:%M:%S"),
        reverse=True)
    json.dump(geojson_content, outfile, indent=2, separators=(",", ": "))
    outfile.close()
    connection.close()
async def run():
    print(FeatureCollection(features=[
        feature async for feature in fetch_polling_station_geometries()
    ]))
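# The coroutine above can be driven with asyncio; fetch_polling_station_geometries
# is assumed to be an async generator defined elsewhere in this module:
import asyncio

if __name__ == "__main__":
    asyncio.run(run())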
def union(
    features: Union[List[Feature], FeatureCollection]
) -> Union[Feature, FeatureCollection]:
    """
    Given a list of features or a ``FeatureCollection``, return the union of those.

    :param features: A list of GeoJSON features or FeatureCollection.
    :return: A GeoJSON Feature or FeatureCollection.

    Example:

    >>> from turfpy.transformation import union
    >>> from geojson import Feature, Polygon, FeatureCollection
    >>> f1 = Feature(geometry=Polygon([[
    ...     [-82.574787, 35.594087],
    ...     [-82.574787, 35.615581],
    ...     [-82.545261, 35.615581],
    ...     [-82.545261, 35.594087],
    ...     [-82.574787, 35.594087]
    ... ]]), properties={"fill": "#00f"})
    >>> f2 = Feature(geometry=Polygon([[
    ...     [-82.560024, 35.585153],
    ...     [-82.560024, 35.602602],
    ...     [-82.52964, 35.602602],
    ...     [-82.52964, 35.585153],
    ...     [-82.560024, 35.585153]]]), properties={"fill": "#00f"})
    >>> union(FeatureCollection([f1, f2], properties={"combine": "yes"}))
    """
    shapes = []
    properties_list = []
    if isinstance(features, list):
        for f in features:
            if f.type != "Feature":
                raise Exception("Not a valid feature")
            geom = get_geom(f)
            s = shape(geom)
            shapes.append(s)
            if "properties" in f.keys():
                properties_list.append(f["properties"])
    else:
        if "features" not in features.keys():
            raise Exception("Invalid FeatureCollection")
        if "properties" in features.keys():
            properties_list.append(features["properties"])
        for f in features["features"]:
            geom = get_geom(f)
            s = shape(geom)
            shapes.append(s)
            if "properties" in f.keys():
                properties_list.append(f["properties"])

    result = cascaded_union(shapes)
    result = mapping(result)

    properties = merge_dict(properties_list)

    if result["type"] == "GeometryCollection":
        features = []
        for geom in result["geometries"]:
            features.append(Feature(geometry=geom))
        return FeatureCollection(features, properties=properties)

    return Feature(geometry=result, properties=properties)
def parse_poly(o):
    with open('./poly/' + o.state.data_id + '.poly', "r") as fp:
        lines = fp.readlines()

    in_ring = False
    coords = []

    for (index, line) in enumerate(lines):
        if index == 0:
            # first line is junk.
            continue
        elif index == 1:
            # second line is the first polygon ring.
            coords.append([[], []])
            ring = coords[-1][0]
            in_ring = True
        elif in_ring and line.strip() == 'END':
            # we are at the end of a ring, perhaps with more to come.
            in_ring = False
        elif in_ring:
            # we are in a ring and picking up new coordinates.
            ring.append([float(v) for v in line.split()])
        elif not in_ring and line.strip() == 'END':
            # we are at the end of the whole polygon.
            break
        elif not in_ring and line.startswith('!'):
            # we are at the start of a polygon part hole.
            coords[-1][1].append([])
            ring = coords[-1][1][-1]
            in_ring = True
        elif not in_ring:
            # we are at the start of a polygon part.
            coords.append([[], []])
            ring = coords[-1][0]
            in_ring = True

    pco = pyclipper.PyclipperOffset()
    print("len coords =", len(coords))
    path = []
    for polygon in coords:
        if len(polygon[0]) == 0:
            continue
        path = []
        for points in polygon:
            for point in points:
                coord = list(point)
                print("coord", coord[0], coord[1])
                # scale to integers for pyclipper
                coord = (int(coord[0] * 100000), int(coord[1] * 100000))
                print("coord", coord[0], coord[1])
                path.append(coord)
        print(path)
        print("--------------")
    # FIXME
    # path = ((171, 495), (171, 497), (175, 497), (175, 495), (171, 495))
    # path = ((180, 200), (260, 200), (260, 150), (180, 150))
    print("path", path)
    # http://www.angusj.com/delphi/clipper/documentation/Docs/Units/ClipperLib/Classes/ClipperOffset/Methods/AddPath.htm
    pco.AddPath(path, pyclipper.JT_MITER, pyclipper.ET_CLOSEDPOLYGON)
    solution = pco.Execute(2.0)  # 0.84 cca 1 m
    solution = solution[0]
    print("solution", solution)
    solution.append(solution[0])
    print("solution", solution)
    i = 0
    for point in solution:
        # scale back to degrees
        solution[i] = (point[0] / 100000, point[1] / 100000)
        i += 1
    print("solution", solution)
    test = Polygon([solution])
    my_feature = Feature(geometry=test)
    feature_collection = FeatureCollection([my_feature])
    print()
    print(feature_collection)
    for x in vor.regions[region]:
        # Not sure how to map the "infinite" point, so, leave off those regions for now:
        if x == -1:
            break
        else:
            # Get the vertex out of the list, and flip the order for folium:
            vertex = vor.vertices[x]
            vertex = (vertex[1], vertex[0])
            vertex_list.append(vertex)
    # Save the vertex list as a polygon and then add to the feature_list:
    polygon = Polygon([vertex_list])
    feature = Feature(geometry=polygon, properties={})
    feature_list.append(feature)

# Write the features to the new file:
feature_collection = FeatureCollection(feature_list)
print(feature_collection, file=vorJSON)
vorJSON.close()

# Add the voronoi layer to the map:
# mapVor.geo_json(geo_path='libVor.json', fill_color="BuPu",
#                 fill_opacity=0.01, line_opacity=0.25)
# lib_geo = os.path.join('data', 'libVor.json')
mapVor.choropleth(geo_data='libVorBike.json',
                  fill_color="BuPu",
                  fill_opacity=0.01,
                  line_opacity=0.25)
# folium.LayerControl().add_to(mapVor)
mapVor.save(outfile='libVorBike.html')
def get(self, **kwargs):
    url = kwargs['url']
    rip_files_to_process = kwargs['rip_current_files']
    output_directory = kwargs['output_directory']
    stations = kwargs['stations']
    self.logger.debug("Querying url: %s" % (url))
    rip_scraper = RipCurrentScraper()
    try:
        files = rip_scraper.directory_listing(url)
        for file_name in rip_files_to_process:
            # Search for the file in the directory listing to determine its
            # last update time
            last_modded_file = next(
                (item for item in files
                 if item["file_url"].find(file_name) != -1), None)
            # Open up, if available, the last data we had for the station.
            try:
                file_parts = os.path.splitext(file_name)
                local_file = os.path.join(output_directory,
                                          '%s.json' % (file_parts[0]))
                self.logger.debug("Checking last file downloaded: %s" %
                                  (local_file))
                local_file_date = None
                with open(local_file, 'r') as local_data_file:
                    json_data = json.load(local_data_file)
                    # All the features will have the same date, so let's just
                    # get the first one.
                    feature = json_data['features'][0]
                    local_file_date = du_parser.parse(
                        feature['properties']['date'])
            except Exception as e:
                self.logger.exception(e)

            download_latest = False
            if local_file_date is not None:
                if last_modded_file['last_modified'] > local_file_date:
                    self.logger.debug(
                        "Remote date: %s newer than our last date: %s" %
                        (last_modded_file['last_modified'], local_file_date))
                    download_latest = True
            else:
                download_latest = True

            if download_latest:
                file_url = urlparse.urljoin(url, file_name)
                self.logger.debug("Downloading file url: %s" % (file_url))
                file_data = rip_scraper.download_file(file_url)
                try:
                    feat_collection = None
                    features = []
                    self.logger.debug("Writing new local file: %s" %
                                      (local_file))
                    for row in file_data.split('\n'):
                        if len(row):
                            cols = row.replace('"', '').replace(
                                ',', '').split('|')
                            # Is the station one we want?
                            if cols[0] in stations:
                                features.append(
                                    Feature(
                                        geometry=Point((float(cols[1]),
                                                        float(cols[2]))),
                                        properties={
                                            'id': cols[0],
                                            'description': cols[3],
                                            'nws_area': cols[4],
                                            'flag': cols[5],
                                            'level': cols[6],
                                            'date': last_modded_file[
                                                'last_modified'].strftime(
                                                    '%Y-%m-%d %H:%M:%S')
                                        }))
                    feat_collection = FeatureCollection(features)
                    with open(local_file, 'w') as local_data_file:
                        json.dump(feat_collection, local_data_file)
                except Exception as e:
                    self.logger.exception(e)
    except Exception as e:
        self.logger.exception(e)
def main(): ext = "" if opts['format'] == "GeoJSON": ext = "json" elif opts['format'] == "GPKG": ext = "gpkg" elif opts['format'] == "DXF": ext = "dxf" elif opts['format'] == "ESRI Shapefile": ext = "shp" # Open dsm dsm = rio.open(opts['dsm']) # Read the tiff as an numpy masked array dsm_array = dsm.read(1, masked=True) # Create a kernel based on the parameter 'noise_filter_size' and the tiff resolution kernel = get_kernel(float(opts['noise_filter_size']), dsm) # Check if we want to use the dtm also if opts['dtm'] != '': # Open the dtm dtm = rio.open(opts['dtm']) # Assert that the dtm and dsm have the same bounds and resolution assert_same_bounds_and_resolution(dsm, dtm) # Calculate the different between the dsm and dtm array = calculate_difference(dsm_array, dtm) else: array = dsm_array # Calculate the ranges based on the parameter 'intervals' and the elevation array ranges = calculate_ranges(opts['intervals'], array) features = [] for bottom, top in ranges: # Binarize the image. Everything in [bottom, top) is white. Everything else is black surface_array = np.ma.where((bottom <= array) & (array < top), 255, 0).astype(np.uint8) # Apply kernel to reduce noise without_noise = cv2.morphologyEx( surface_array, cv2.MORPH_CLOSE, kernel) if kernel is not None else surface_array # Find contours contours, hierarchy = cv2.findContours(without_noise, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_SIMPLE) # Check if we found something if len(contours) > 0: # Transform contours from pixels to coordinates mapped_contours = [ map_pixels_to_coordinates(dsm, opts['epsg'], to_pixel_format(contour)) for contour in contours ] # Build the MultiPolygon for based on the contours and their hierarchy built_multi_polygon = LevelBuilder( bottom, top, mapped_contours, hierarchy[0]).build_multi_polygon() features.append(built_multi_polygon) # Write the GeoJSON to a file dump = dumps(FeatureCollection(features)) with open("output.json", 'w+') as output: output.write(dump) if ext != "json": subprocess.check_call([ "ogr2ogr", "-f", opts['format'], "output.%s" % ext, "output.json" ], stdout=subprocess.DEVNULL) if os.path.isfile("output.%s" % ext): if opts['format'] == "ESRI Shapefile": ext = "zip" os.makedirs("contours") contour_files = glob.glob("output.*") for cf in contour_files: shutil.move(cf, os.path.join("contours", os.path.basename(cf))) shutil.make_archive('output', 'zip', 'contours/') print(os.path.join(os.getcwd(), "output.%s" % ext)) else: print("error")
def getPathGeometry(id_map_source_public, id_map_source_private,
                    id_map_target_private, id_map_target_public, tt_public,
                    tt_private, parent_tree_public, parent_tree_private):
    global graph
    markers = {}
    # data = request.get_json(force=True)
    # marker_id = int(data['id'])
    # location_type = data['location_type']
    for marker_id in id_map_source_public:
        for target_id in id_map_target_public:
            res_none = {
                "path_geom": None,
                "tt_public": None,
                "tt_private": None
            }
            # if location_type == "source":
            #     if len(id_map_source_public) <= 1 or len(id_map_target_public) > 1:
            #         markers[marker_id] = {target_id: res_none}
            source_public = id_map_source_public[marker_id]
            source_private = id_map_source_private[marker_id]
            # target_id = None
            # for key in id_map_target_public:
            #     target_id = key
            target_public = id_map_target_public[target_id]
            target_private = id_map_target_private[target_id]
            # else:
            #     if len(id_map_source_public) > 1:
            #         res = {"path_geom": -1, "tt_public": -1, "tt_private": -1}
            #         markers[marker_id] = {target_id: res}
            #
            #     if len(id_map_source_public) > 1 and len(id_map_target_public) == 1:
            #         markers[marker_id] = {target_id: res_none}
            #
            #         source_id = None
            #         for key in id_map_source_public:
            #             source_id = key
            #
            #         source_public = id_map_source_public[source_id]
            #         source_private = id_map_source_private[source_id]
            #
            #         node_id_public = id_map_target_public[marker_id]
            #         node_id_private = id_map_target_private[marker_id]
            paths = []
            pathPublic = Path(parent_tree_public[source_public])
            pathPublicGeom = pathPublic.getPathGeometry(
                graph, target_public, region)
            if pathPublicGeom is None:
                if marker_id in markers:
                    markers[marker_id][target_id] = res_none
                else:
                    markers[marker_id] = {target_id: res_none}
                continue
            properties = {'line-color': 'b'}
            feature = Feature(geometry=pathPublicGeom, properties=properties)
            paths.append(feature)

            pathPrivate = Path(parent_tree_private[source_private])
            pathPrivateGeom = pathPrivate.getPathGeometry(
                graph, target_private, region)
            if pathPrivateGeom is None:
                if marker_id in markers:
                    markers[marker_id][target_id] = res_none
                else:
                    markers[marker_id] = {target_id: res_none}
                continue
            properties = {'line-color': 'r'}
            feature = Feature(geometry=pathPrivateGeom, properties=properties)
            paths.append(feature)

            tt_node_public = round(tt_public[source_public][target_public] / 60)
            tt_node_private = round(
                tt_private[source_private][target_private] / 60)
            fc = FeatureCollection(paths)
            res = {
                "path_geom": fc,
                "tt_public": tt_node_public,
                "tt_private": tt_node_private
            }
            if marker_id in markers:
                markers[marker_id][target_id] = res
            else:
                markers[marker_id] = {target_id: res}
    return markers
def get_projects(format):
    """Gets projects data from fablabs.io."""
    projects_json = data_from_fablabs_io(fablabs_io_projects_api_url_v0)
    projects = {}
    project_url = "https://www.fablabs.io/projects/"
    fablabs = get_labs(format="object")

    # Load all the projects
    for i in projects_json["projects"]:
        i = i["projects"]
        current_project = Project()
        current_project.id = i["id"]
        current_project.title = i["title"]
        current_project.description = i["description"]
        current_project.github = i["github"]
        current_project.web = i["web"]
        current_project.dropbox = i["dropbox"]
        current_project.bitbucket = i["bitbucket"]
        current_project.lab_id = i["lab_id"]
        # Add the lab of the project
        if i["lab_id"] is not None:
            for k in fablabs:
                if fablabs[k].id == i["lab_id"]:
                    current_project.lab = fablabs[k]
        else:
            current_project.lab = None
        current_project.owner_id = i["owner_id"]
        current_project.created_at = i["created_at"]
        current_project.updated_at = i["updated_at"]
        current_project.vimeo = i["vimeo"]
        current_project.flickr = i["flickr"]
        current_project.youtube = i["youtube"]
        current_project.drive = i["drive"]
        current_project.twitter = i["twitter"]
        current_project.facebook = i["facebook"]
        current_project.googleplus = i["googleplus"]
        current_project.instagram = i["instagram"]
        current_project.status = i["status"]
        current_project.version = i["version"]
        current_project.faq = i["faq"]
        current_project.scope = i["scope"]
        current_project.community = i["community"]
        current_project.lookingfor = i["lookingfor"]
        current_project.cover = i["cover"]
        url = project_url + str(current_project.id)
        current_project.url = url
        # Add the project
        projects[current_project.id] = current_project

    # Return a dictionary / json
    if format.lower() == "dict" or format.lower() == "json":
        output = {}
        for j in projects:
            project_dict = projects[j].__dict__
            # Convert the lab from a Fab Lab object to a dict
            if project_dict["lab"] is not None:
                project_dict["lab"] = project_dict["lab"].__dict__
            output[j] = project_dict
    # Return a geojson, only for projects linked to a lab
    elif format.lower() == "geojson" or format.lower() == "geo":
        projects_list = []
        for p in projects:
            if projects[p].lab_id is not None:
                single_project = projects[p].__dict__
                if projects[p].lab is not None:
                    single_project["lab"] = single_project["lab"].__dict__
                for l in fablabs:
                    single_lab = fablabs[l].__dict__
                    if single_lab["id"] == single_project["lab_id"]:
                        # GeoJSON positions are (longitude, latitude)
                        project_lab = Feature(
                            type="Feature",
                            geometry=Point((single_lab["longitude"],
                                            single_lab["latitude"])),
                            properties=single_project)
                        projects_list.append(project_lab)
        output = dumps(FeatureCollection(projects_list))
    # Return an object
    elif format.lower() == "object" or format.lower() == "obj":
        output = projects
    # Default: return an object
    else:
        output = projects

    return output
def worker():
    # read json + reply
    global graph, dataManager, region
    data = request.get_json(force=True)
    map_source_coord = {}
    map_target_coord = {}
    id_map_source_public = {}
    id_map_target_public = {}
    id_map_source_private = {}
    id_map_target_private = {}
    sources_coordinates = data['sources']
    targets_coordinates = data['targets']
    polygon_source_coords = data['polygon_source_coords']
    polygon_target_coords = data['polygon_target_coords']
    selected_neighborhoods = data['selected_neighborhoods']
    removed_routes = None
    removed_stops = None
    removed_segments = None
    target_markers = False
    if "removed_routes" in data and data["removed_routes"]:
        removed_routes = data["removed_routes"]
    if "removed_stops" in data and data["removed_stops"]:
        removed_stops = getRemovedStops(data["removed_stops"])
    if "removed_segments" in data and data["removed_segments"]:
        removed_segments = {
            int(old_key): set(val)
            for old_key, val in data["removed_segments"].items()
        }
    if sources_coordinates:
        # Source is a set of markers
        sources_private, sources_public = getNodesFromMarkersCoordinates(
            sources_coordinates, "source", id_map_source_public,
            id_map_source_private, id_map_target_private,
            id_map_target_public, map_source_coord, map_target_coord)
    elif polygon_source_coords:
        # Source is a polygon
        sources_private, sources_public = getNodesWithinPolygon(
            polygon_source_coords, "source", id_map_source_public,
            id_map_source_private, id_map_target_private,
            id_map_target_public, map_source_coord, map_target_coord)
    if targets_coordinates:
        # Target is a set of markers
        target_markers = True
        targets_private, targets_public = getNodesFromMarkersCoordinates(
            targets_coordinates, "target", id_map_source_public,
            id_map_source_private, id_map_target_private,
            id_map_target_public, map_source_coord, map_target_coord)
    elif polygon_target_coords:
        # Target is a polygon
        targets_private, targets_public = getNodesWithinPolygon(
            polygon_target_coords, "target", id_map_source_public,
            id_map_source_private, id_map_target_private,
            id_map_target_public, map_source_coord, map_target_coord)
    elif selected_neighborhoods:
        # Target is a neighborhood
        targets_private, targets_public = getNodesWithinNeighborhoods(
            selected_neighborhoods, "target", id_map_source_public,
            id_map_source_private, id_map_target_private,
            id_map_target_public, map_source_coord, map_target_coord)
    time = data['timestamp']
    timestamp = datetime.fromtimestamp(time / 1000)
    node_colors, colored_type, path_markers = computeRelativeReachability(
        target_markers, sources_public, targets_public, sources_private,
        targets_private, id_map_source_public, id_map_source_private,
        id_map_target_private, id_map_target_public, timestamp,
        removed_routes, removed_stops, removed_segments)
    if colored_type == "source":
        map_coord = map_source_coord
    else:
        map_coord = map_target_coord
    markers = []
    allColored = True
    for n in map_coord:
        if n not in node_colors:
            allColored = False
            continue
        n_coord = map_coord[n]
        point = Point([n_coord[1], n_coord[0]])
        properties = {'marker-color': node_colors[n]}
        feature = Feature(geometry=point, properties=properties)
        markers.append(feature)
    gc = FeatureCollection(markers)
    return {"geom": gc, "allColored": allColored, "pathGeom": path_markers}
def sheet_to_json_onefile(obj1, obj2, obj3, file_name):
    features = []
    # the three sheets share one column layout and differ only in the
    # "type" tag written into each feature's properties
    for sheet, row_type in ((obj1, "nowopen"), (obj2, "announced"),
                            (obj3, "closing")):
        for row in islice(sheet, 1, None):
            # required fields, the script will break without these
            name = row[0]
            desc = row[6]
            obj_lat = row[2]
            obj_long = row[3]
            zoom = row[11] or '0'
            bearing = row[12] or '0'
            pitch = row[13] or '0'
            # non-essential fields; "or" falls back to an empty string
            # when the cell is falsey
            address = row[1] or ''
            phone = row[4] or ''
            website = row[5] or ''
            rel_id = row[7] or ''
            image = row[8] or ''
            caption = row[9] or ''
            credit = row[10] or ''
            # if the lat or long field isn't populated, move to the next record
            if not obj_lat or not obj_long:
                continue
            obj_properties = {
                "type": row_type,
                "name": name,
                "desc": desc,
                "address": address,
                "phone": phone,
                "website": website,
                "rel_id": rel_id,
                "image": image,
                "caption": caption,
                "credit": credit,
                "zoom": float(zoom),
                "bearing": float(bearing),
                "pitch": float(pitch)
            }
            # GeoJSON positions are (longitude, latitude)
            obj_point = Point((float(obj_long), float(obj_lat)))
            obj_feature = Feature(geometry=obj_point,
                                  properties=obj_properties)
            features.append(obj_feature)

    feature_collection = FeatureCollection(features)
    with open(file_name, 'w') as f:
        dump(feature_collection, f)
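# Hypothetical stand-in sheets for sheet_to_json_onefile(): any iterables of
# 14-column rows work, since islice() skips the header row; the rows and the
# output file name below are invented for the sketch.
header = ("name", "address", "lat", "long", "phone", "website", "desc",
          "rel_id", "image", "caption", "credit", "zoom", "bearing", "pitch")
opening = [header,
           ("Cafe A", "1 Main St", "40.71", "-74.00", "", "", "A new cafe",
            "", "", "", "", "14", "0", "0")]
announced = [header]
closing = [header]
sheet_to_json_onefile(opening, announced, closing, "locations.geojson")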
# create our set of evac GeoJSON polygons based
# on location and list of times in seconds
generate_evac_polys(start_node_id, evac_times)

# final result GeoJSON
final_res = FeatureCollection(combined_result)

# write to disk
write_geojson("../geodata/ch08-03_final_dist_poly.geojson", final_res)

# clean up and close database cursor and connection
cur.close()
conn.close()
def process(filename):
    raster = filename
    ds = gdal.Open(raster)
    time = ds.GetMetadataItem("TIFFTAG_DATETIME")

    gt = ds.GetGeoTransform()
    originX = gt[0]
    originY = gt[3]
    pixelX = gt[1]
    pixelY = gt[5]

    cols = ds.RasterXSize
    rows = ds.RasterYSize

    # shift the origin to the centre of the image
    originX = originX + pixelX * cols / 2.0
    originY = originY + pixelY * rows / 2.0

    # get the size of the image
    xLength = abs(pixelX * cols)
    yLength = abs(pixelY * rows)

    # get the area of the image
    area = xLength * yLength

    old_cs = osr.SpatialReference()
    old_cs.ImportFromWkt(ds.GetProjectionRef())

    # create the new coordinate system
    wgs84_wkt = """
    GEOGCS["WGS 84",
        DATUM["WGS_1984",
            SPHEROID["WGS 84",6378137,298.257223563,
                AUTHORITY["EPSG","7030"]],
            AUTHORITY["EPSG","6326"]],
        PRIMEM["Greenwich",0,
            AUTHORITY["EPSG","8901"]],
        UNIT["degree",0.01745329251994328,
            AUTHORITY["EPSG","9122"]],
        AUTHORITY["EPSG","4326"]]"""
    new_cs = osr.SpatialReference()
    new_cs.ImportFromWkt(wgs84_wkt)

    # create a transform object to convert between coordinate systems
    transform = osr.CoordinateTransformation(old_cs, new_cs)

    # get the centre coordinates in lat/long; with GDAL's traditional
    # axis order the transformed point comes back as (lon, lat)
    latlong = transform.TransformPoint(originX, originY)
    longitude = latlong[0]
    latitude = latlong[1]

    filerootname = raster.split(".")[0]
    lat = str(round(latitude, 3)) + " N"
    if latitude < 0:
        lat = str(round(-latitude, 3)) + " S"
    longt = str(round(longitude, 3)) + " E"
    if longitude < 0:
        longt = str(round(-longitude, 3)) + " W"

    description = {"Name": filerootname,
                   "Size": str(xLength) + " x " + str(yLength),
                   "Area": str(area),
                   "Time": str(time),
                   "Co-ordinates": str(lat) + ", " + str(longt)}
    marker = Feature(geometry=Point((longitude, latitude)),
                     properties=description)
    geojsonfile = FeatureCollection([marker], name=filerootname)
    with open(filerootname + '.json', 'w') as outfile:
        json.dump(geojsonfile, outfile, sort_keys=True)

    result_filename = filerootname + '.json'
    result = open(os.path.join(cwd, result_filename))
    return result
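# Usage sketch for process(); "scene.tif" is a placeholder GeoTIFF path and
# cwd is the module-level working directory the function already assumes.
fh = process("scene.tif")
print(json.load(fh)["features"][0]["properties"])
fh.close()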
def generate_evac_polys(start_node_id, evac_times):
    """
    :param start_node_id: network node id to start from
    :param evac_times: list of times in seconds
    :return: none, generates GeoJSON files

    Relies on the module-level cur, combined_result, e01_hallway_shply
    and write_geojson defined elsewhere in this script.
    """
    for evac_time in evac_times:
        distance_poly_query = """
            SELECT seq, id1 AS node, cost, ST_AsGeoJSON(the_geom)
                FROM pgr_drivingDistance(
                        'SELECT ogc_fid AS id, source, target,
                             ST_Length(wkb_geometry)/5000*60*60 AS cost
                         FROM geodata.ch08_e01_networklines',
                        {0}, {1}, false, false
                     ) as ev_dist
                JOIN geodata.ch08_e01_networklines_vertices_pgr
                     AS networklines
                ON ev_dist.id1 = networklines.id;
            """.format(start_node_id, evac_time)

        cur.execute(distance_poly_query)
        # get entire query results to work with
        distance_nodes = cur.fetchall()

        # empty list to hold each segment for our GeoJSON output
        route_results = []

        # loop over each segment in the result route segments
        # create the list of our new GeoJSON
        for dist_node in distance_nodes:
            sequence = dist_node[0]  # sequence number
            node = dist_node[1]  # node id
            cost = dist_node[2]  # cost value
            geojs = dist_node[3]  # geometry
            geojs_geom = loads(geojs)  # create geojson geom
            geojs_feat = Feature(geometry=geojs_geom,
                                 properties={
                                     'sequence_num': sequence,
                                     'node': node,
                                     'evac_time_sec': cost,
                                     'evac_code': evac_time
                                 })
            # add each point to total including all points
            combined_result.append(geojs_feat)
            # add each point for individual evacuation time
            route_results.append(geojs_geom)

        # geojson module creates GeoJSON Feature Collection
        geojs_fc = FeatureCollection(route_results)

        # create list of points for each evac time
        evac_time_pts = [
            asShape(route_segment) for route_segment in route_results
        ]

        # create MultiPoint from our list of points for evac time
        point_collection = geometry.MultiPoint(list(evac_time_pts))

        # create our convex hull polygon around evac time points
        convex_hull_polygon = point_collection.convex_hull

        # intersect convex hull with hallways polygon (ch = convex hull)
        cvex_hull_intersect = e01_hallway_shply.intersection(
            convex_hull_polygon)

        # export convex hull intersection to geojson
        cvex_hull = cvex_hull_intersect.__geo_interface__

        # for each evac time we create a unique GeoJSON polygon
        output_ply = "../geodata/ch08-03_dist_poly_" + str(
            evac_time) + ".geojson"
        write_geojson(output_ply, cvex_hull)

        output_geojson_route = "../geodata/ch08-03_dist_pts_" + str(
            evac_time) + ".geojson"
        # save GeoJSON to a file in our geodata folder
        write_geojson(output_geojson_route, geojs_fc)
def _normalize(geojson):
    # wrap bare geometries and single Features so callers always
    # receive a FeatureCollection
    if geojson["type"] == "FeatureCollection":
        return geojson
    if geojson["type"] == "Feature":
        return FeatureCollection([geojson])
    return FeatureCollection([Feature(geometry=geojson)])
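# Quick check of _normalize(): a bare geometry, a Feature, and a
# FeatureCollection all come back as a FeatureCollection. Needs only the
# geojson package already imported in this module.
bare_geometry = Point((102.0, 0.5))
fc = _normalize(bare_geometry)
assert fc["type"] == "FeatureCollection"
assert fc["features"][0]["geometry"] == bare_geometry
assert _normalize(fc) is fc  # FeatureCollections pass through unchanged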
def visualisation(permutation, iti_matrix, mydf, j_piece):
    j_list = []
    # draw folium graph
    str_fea_list = []
    tooltip = 'Click For More Info'
    des_dict = {
        'WALK': 'Walk to ',
        'SUBWAY': 'Take subway to ',
        'BUS': 'Take bus to '
    }
    m = folium.Map(location=[1.2791, 103.8154], zoom_start=12)
    for i in range(len(permutation) - 1):
        # for one itinerary
        sta_plc_idx = permutation[i]
        end_plc_idx = permutation[i + 1]
        itinerary = iti_matrix[sta_plc_idx][end_plc_idx]
        true_sta_pt = np.array((mydf._get_value(sta_plc_idx, 'latitude'),
                                mydf._get_value(sta_plc_idx, 'longitude')))
        true_end_pt = np.array((mydf._get_value(end_plc_idx, 'latitude'),
                                mydf._get_value(end_plc_idx, 'longitude')))
        temp_num_legs = len(itinerary['legs'])  # num of legs
        pt_lat = []
        pt_lon = []
        tpl_list = []
        pt_name = []
        mode_list = []
        dist_list = []

        for k in range(temp_num_legs):  # for each leg
            pt_lon.append(itinerary['legs'][k]['from']['lon'])
            pt_lat.append(itinerary['legs'][k]['from']['lat'])
            tpl_list.append((itinerary['legs'][k]['from']['lon'],
                             itinerary['legs'][k]['from']['lat']))
            pt_name.append(itinerary['legs'][k]['to']['name'])
            mode_list.append(des_dict[itinerary['legs'][k]['mode']])
            dist_list.append(
                str(round(float(itinerary['legs'][k]['distance']) / 1000, 2))
                + ' km.')
            if k == temp_num_legs - 1:
                pt_lon.append(itinerary['legs'][k]['to']['lon'])
                pt_lat.append(itinerary['legs'][k]['to']['lat'])
                tpl_list.append((itinerary['legs'][k]['to']['lon'],
                                 itinerary['legs'][k]['to']['lat']))

        temp_feature = Feature(geometry=MultiLineString([tpl_list]),
                               properties={'stroke': '#AF4646'})
        str_fea_list.append(temp_feature)

        first_point = np.array((pt_lat[0], pt_lon[0]))
        distance1 = np.linalg.norm(first_point - true_sta_pt)
        distance2 = np.linalg.norm(first_point - true_end_pt)
        start_point = [pt_lat[0], pt_lon[0]]
        end_point = [pt_lat[-1], pt_lon[-1]]

        iterator = range(len(mode_list))  # only affects the text formatting
        string = ''
        if distance1 > distance2:
            # the itinerary came back reversed, so walk the legs backwards
            iterator = range(len(mode_list) - 1, -1, -1)
            start_point = [pt_lat[-1], pt_lon[-1]]
            end_point = [pt_lat[0], pt_lon[0]]
        counter = 0
        for j in iterator:
            string += str(counter + 1) + '. ' + \
                mode_list[j] + pt_name[j] + \
                '. Estimated distance is ' + dist_list[j] + '\n'
            counter += 1

        folium.Marker(
            start_point,
            popup='<strong>' + string + '</strong>',
            tooltip=tooltip,
            icon=folium.Icon(icon='trophy' if i != 0 else 'flag')).add_to(m)
        folium.Marker(
            end_point,
            icon=folium.Icon(
                icon='trophy' if i != len(permutation) - 2 else 'star'
            )).add_to(m)

        temp_j_ele = {}
        temp_j_ele['order'] = i + 1
        temp_j_ele['poi_name'] = mydf._get_value(end_plc_idx, 'poi_name')
        temp_j_ele['time_to_spend'] = mydf._get_value(end_plc_idx, 'time')
        temp_j_ele['time_to_travelhere'] = str(
            timedelta(seconds=int(itinerary['duration'])))
        temp_j_ele['description'] = mydf._get_value(end_plc_idx,
                                                    'description')
        j_list.append(temp_j_ele)

    j = {'basics': j_piece, 'itinerary': j_list}
    j_file = json.dumps(j)

    feature_collection = FeatureCollection(str_fea_list)
    ms = geojson.dumps(feature_collection)
    folium.GeoJson(ms, name='multistring').add_to(m)

    # Generate map
    render_m = m.get_root().render()

    # insert value into map_database; parameterized so quotes in the rendered
    # HTML cannot break the statement (assumes a DB-API driver that uses %s
    # placeholders, as psycopg2 and MySQLdb do)
    map_engine = map_db_init()
    insert_req = "INSERT INTO map_db (id, map_html) VALUES (default, %s)"
    cursor = map_engine.cursor()
    cursor.execute(insert_req, (render_m,))
    map_engine.commit()
    return j_file
diff_lat = 3.75 / 3600
diff_lng = 5.625 / 3600
lat2, lng2 = lat1, lng1 + diff_lng
lat3, lng3 = lat1 + diff_lat, lng1 + diff_lng
lat4, lng4 = lat1 + diff_lat, lng1

# close the ring by repeating the first corner
coordinates = [[lng1, lat1], [lng2, lat2], [lng3, lat3],
               [lng4, lat4], [lng1, lat1]]

p = Polygon([coordinates])
return Feature(meshcode, geometry=p)


if __name__ == "__main__":
    ll2m = LatLngToMesh()
    lat, lng = 35.700001, 139.800001
    meshcode = ll2m.convert2Mesh(3, lat, lng)
    print(meshcode)
    print(ll2m.convert2LatLng(meshcode))

    mesh_list = [ll2m.convert2LatLng(meshcode)]
    fc = FeatureCollection(mesh_list)
    with open('./mesh.json', 'w', encoding='utf-8') as fp:
        fp.write(geojson.dumps(fc, indent=2))
def run_route(start_node_id, end_node_id, route_type):
    """
    :param start_node_id: network node id the route starts from
    :param end_node_id: network node id the route ends at
    :param route_type: "1" for a barrier-free route (no stairs)
    :return: GeoJSON FeatureCollection of route segments
    """
    # TODO add route options dictionary
    # TODO add parameter to function route_options=None

    # sample dictionary of options
    # route_options = {'route_types': {
    #                      'standard_route': 1,
    #                      'barrierfree route': 2,
    #                      'indoor_only_prefered': 3,
    #                      'fastest': 4},
    #                  'route_logic': {
    #                      'force_route_through_location': True}
    #                  }

    cur = connection.cursor()
    base_route_q = """SELECT id, source, target,
                     total_cost::DOUBLE PRECISION AS cost,
                     floor, network_type
                     FROM geodata.networklines_3857"""

    # set default query
    barrierfree_q = "WHERE 1=1"
    if route_type == "1":
        # exclude all networklines of type stairs
        barrierfree_q = "WHERE network_type not in (1,3)"

    routing_query = '''
        SELECT seq, id1 AS node, id2 AS edge,
               ST_Length(geom) AS cost, floor,
               network_type,
               ST_AsGeoJSON(geom) AS geoj
          FROM pgr_dijkstra('
            {normal} {type}', %s, %s, FALSE, FALSE
          ) AS dij_route
          JOIN geodata.networklines_3857 AS input_network
            ON dij_route.id2 = input_network.id ;
      '''.format(normal=base_route_q, type=barrierfree_q)

    # run our shortest path query; both nodes must exist and must differ
    if start_node_id and end_node_id and start_node_id != end_node_id:
        cur.execute(routing_query, (start_node_id, end_node_id))
    else:
        logger.error("start or end node is None or is the same node " +
                     str(start_node_id))
        return HttpResponseNotFound('<h1>Sorry NO start or end node'
                                    ' found within 200m</h1>')

    # get entire query results to work with
    route_segments = cur.fetchall()

    route_info = calc_distance_walktime(route_segments)

    # empty list to hold each segment for our GeoJSON output
    route_result = []

    # loop over each segment in the result route segments
    # create the list of our new GeoJSON
    for segment in route_segments:
        seg_length = segment[3]  # length of segment
        layer_level = segment[4]  # floor number
        seg_type = segment[5]
        seg_node_id = segment[1]
        seq_sequence = segment[0]
        geojs = segment[6]  # geojson coordinates
        geojs_geom = loads(geojs)  # load string to geom
        geojs_feat = Feature(geometry=geojs_geom,
                             properties={'floor': layer_level,
                                         'length': seg_length,
                                         'network_type': seg_type,
                                         'seg_node_id': seg_node_id,
                                         'sequence': seq_sequence}
                             )
        route_result.append(geojs_feat)

    # using the geojson module to create our GeoJSON Feature Collection
    geojs_fc = FeatureCollection(route_result)
    geojs_fc.update(route_info)

    return geojs_fc
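# Hedged caller sketch: run_route() already returns an HttpResponseNotFound
# on bad nodes, so a thin Django view like this hypothetical one is the
# natural wrapper; the view name and URL parameters are invented.
from django.http import JsonResponse

def route_view(request, start_id, end_id):
    result = run_route(int(start_id), int(end_id),
                       request.GET.get("type", "0"))
    if isinstance(result, HttpResponseNotFound):
        return result
    return JsonResponse(result)  # FeatureCollection is a dict subclass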