def test_silly_geo_to_h3():
    lat, lng = 37.3615593, -122.0553238

    expected0 = '85283473fffffff'
    out0 = h3.geo_to_h3(lat, lng, 5)
    assert out0 == expected0

    out1 = h3.geo_to_h3(lat + 180.0, lng + 360.0, 5)
    expected1 = '85ca2d53fffffff'
    assert out1 == expected1
def add_test_line_edges(self, test_lines):
    og_lines = copy.copy(test_lines)
    test_lines = test_lines.to_crs("epsg:3857")
    max_node_full_graph = max(self.convert_ids.keys())
    flip_node = {v: k for k, v in self.convert_ids.items()}
    income_fn = f'{self.where}/{self.where.lower()}_income_pct.csv'
    target_hex = []
    if os.path.isfile(income_fn):
        income_df = pd.read_csv(income_fn)
        income_df = income_df[income_df['pct_above'] >= 0.5]
        target_hex = income_df['hex'].to_list()
    for i, line in enumerate(test_lines.geometry):
        demand, node = line.coords[:]
        s_coord_string = f'[{demand[0]:.0f}, {demand[1]:.0f}]'
        e_coord_string = f'[{node[0]:.0f}, {node[1]:.0f}]'
        end = self.look_up.get(e_coord_string, None)
        start = self.look_up.get(s_coord_string, None)
        if end is None:
            continue
        if start:
            continue
        if self.edges.get((start, end), False) or self.edges.get((end, start), False):
            continue
        if start is None:
            start = self.index
            max_node_full_graph += 1
            self.convert_ids[max_node_full_graph] = start
            self.look_up[s_coord_string] = start
            self.index += 1
        self.demand_nodes[start] = 1
        self.demand_nodes[end] = 1
        self.nodes_to_connect.add(start)
        if target_hex:
            ll_line_demand, ll_line_node = og_lines.geometry.iloc[i].coords[:]
            demand_hex = h3.geo_to_h3(ll_line_demand[1], ll_line_demand[0], 10)
            node_hex = h3.geo_to_h3(ll_line_node[1], ll_line_node[0], 10)
            if demand_hex in target_hex or node_hex in target_hex:
                self.demand_nodes[start] = 0
                self.demand_nodes[end] = 0
                self.nodes_to_connect.remove(start)
        self.edge_to_geom[(max_node_full_graph, flip_node[end])] = line.wkt
        self.edges[(start, end)] = line.length
    self.flip_look_up = {v: k for k, v in self.look_up.items()}
def add_h3_index(gdf):
    coords = gdf[["lat", "lng"]].values
    index_7, index_8, index_9 = [], [], []
    for coord in tqdm(coords):
        index_7.append(h3.geo_to_h3(coord[0], coord[1], 7))
        index_8.append(h3.geo_to_h3(coord[0], coord[1], 8))
        index_9.append(h3.geo_to_h3(coord[0], coord[1], 9))
    gdf["h3_7"] = index_7
    gdf["h3_8"] = index_8
    gdf["h3_9"] = index_9
    return gdf
def test_validation_geo():
    h = '8a28308280fffff'  # invalid hex

    with pytest.raises(H3CellError):
        h3.h3_to_geo(h)

    with pytest.raises(H3ResolutionError):
        h3.geo_to_h3(0, 0, 17)

    with pytest.raises(H3CellError):
        h3.h3_to_geo_boundary(h)

    with pytest.raises(H3CellError):
        h3.h3_indexes_are_neighbors(h, h)
def test_areas_at_00():
    areas_km2 = [
        2.562182162955495529e+06,
        4.476842018179409206e+05,
        6.596162242711056024e+04,
        9.228872919002589697e+03,
        1.318694490797110348e+03,
        1.879593512281297762e+02,
        2.687164354763186225e+01,
        3.840848847060638782e+00,
        5.486939641329895423e-01,
        7.838600808637447015e-02,
        1.119834221989390345e-02,
        1.599777169186613647e-03,
        2.285390931423379875e-04,
        3.264850232091780848e-05,
        4.664070326136773890e-06,
        6.662957615868890711e-07,
    ]

    out = [h3.cell_area(h3.geo_to_h3(0, 0, r), unit='km^2') for r in range(16)]

    assert approx2(out, areas_km2)

    areas_rads2 = [
        6.312389871006786335e-02,
        1.102949377223657809e-02,
        1.625081476657283096e-03,
        2.273696413041990331e-04,
        3.248837599063685022e-05,
        4.630711750349743332e-06,
        6.620305651949173071e-07,
        9.462611873890716096e-08,
        1.351804829317986891e-08,
        1.931178237937334527e-09,
        2.758910081529350229e-10,
        3.941334595426616175e-11,
        5.630465614578665530e-12,
        8.043537197853909460e-13,
        1.149076389260636790e-13,
        1.641537700693487648e-14,
    ]

    out = [
        h3.cell_area(h3.geo_to_h3(0, 0, r), unit='rads^2') for r in range(16)
    ]

    assert approx2(out, areas_rads2)
def add_hex(self, lat: float, lng: float, name=None):
    """Insert a hex into the HexDict.

    REQUIRES:
        lat: latitude of the coordinate.
        lng: longitude of the coordinate.

    EFFECTS:
        -> If the RES_MAX hex does not exist, then the RES_MAX hex is added,
           and the raw density is set to 1.
        -> If the RES_MAX hex already exists, then the raw density of the hex
           is just incremented by 1.

    RETURNS:
        Hexagon: the RES_MAX hexagon of the given coordinates.
    """
    hex_id = h3.geo_to_h3(lat, lng, RES_MAX)
    if hex_id not in self.hex_dict[RES_MAX]:
        self.hex_dict[RES_MAX][hex_id] = Hexagon(hex_id)
        self.hex_dict[RES_MAX][hex_id].raw_density = 1
    else:
        self.hex_dict[RES_MAX][hex_id].raw_density += 1
    self.hex_dict[RES_MAX][hex_id].residents.append(name)
    return self.hex_dict[RES_MAX][hex_id]
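# Hedged sketch of the bucketing that add_hex() above relies on (assumes h3-py v3.x;
# RES_MAX = 11 is a made-up value for illustration): nearby coordinates map to the
# same finest-resolution cell, so repeated inserts simply bump that cell's raw_density.
import h3

RES_MAX = 11  # hypothetical resolution for this sketch only
a = h3.geo_to_h3(46.51960, 6.56560, RES_MAX)
b = h3.geo_to_h3(46.51961, 6.56561, RES_MAX)
# Coordinates about a metre apart will usually land in the same res-11 cell,
# i.e. the same density bucket.
print(a, b, a == b)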
def lat_lng_2_h3(lat, lng, res):
    import h3
    try:
        return h3.geo_to_h3(lat, lng, res)
    except Exception:
        # invalid coordinates will result in a null index value
        return None
def create(rs: Union[ResultSet, Query],
           geom_column: str = 'geom',
           name: Optional[str] = None,
           resolution: Optional[int] = 0,
           plot: str = "size",
           column: Optional[str] = None,
           group_by: Optional[str] = None):
    gdf = to_gdf(rs, geom_column)
    center = None
    zoom = None
    if len(gdf):
        if gdf.crs is None:
            gdf = gdf.set_crs(epsg=4326)
        if gdf.crs.to_epsg() != 4326:
            gdf = gdf.to_crs(epsg=4326)
        center = get_center(gdf)
        zoom = get_zoom(gdf)
    if len(gdf) and resolution:
        # Add H3 index
        hex_col = 'hex' + str(resolution)
        # H3 uses lat, lon
        gdf[hex_col] = gdf[GEOM_COL].apply(
            lambda geom: geo_to_h3(geom.y, geom.x, resolution), 1)
        if group_by:
            # Rows may be grouped by any field in a JSON
            data_to_group = QueryResult.flatten_dataframe(gdf, group_by)
            if "." in group_by:
                _, group_by = group_by.rsplit(".", 1)
            # Join rows with the same value in group_by field
            data_to_group = data_to_group.groupby([hex_col, group_by],
                                                  sort=False,
                                                  as_index=False).size()
        elif column and "." in column:
            # Flatten the dataframe so we may calculate means etc. for the desired column
            data_to_group = QueryResult.flatten_dataframe(gdf, column)
            _, column = column.rsplit(".", 1)
        else:
            data_to_group = gdf
        groupby = data_to_group.groupby(hex_col, sort=False, as_index=False)
        if column:
            groupby = groupby[column]
        # plot = size (or mean, or median, or max, or min) of rows within the same hex
        # Available functions: https://pandas.pydata.org/docs/reference/groupby.html
        results = getattr(groupby, plot)()
        # Add centroid geometry just in case. Of course, H3 has lat lon in the wrong order again
        centroid_lat_lon = results[hex_col].map(lambda hex: h3_to_geo(hex))
        # Convert to GeoSeries first (https://github.com/Toblerity/Shapely/issues/1096#issuecomment-962988370)
        results[GEOM_COL] = gpd.GeoSeries(
            [Point(geom[1], geom[0]) for geom in centroid_lat_lon])
        gdf = gpd.GeoDataFrame(results, geometry=GEOM_COL, crs=gdf.crs)
        gdf = gdf.set_index(hex_col)
        # retain hex index as column too, so it can be plotted
        gdf[hex_col] = gdf.index
    return QueryResult(gdf, center, zoom, name)
def on_post(self, req, resp):
    resp.status = falcon.HTTP_200
    credentials = json.loads(req.stream.read())
    body = json.loads(credentials["body"])

    wind_power = body["windPower"]
    offset = body["offset"]
    resolution = body["resolution"]
    trees = body["trees"]

    tic = time.perf_counter()
    wind = Wind()
    trees_out = []
    for tree in trees:
        longitude = tree["longitude"]
        latitude = tree["latitude"]
        color = tree["color"]
        polygon = wind.getWindLayerCoordinates(longitude, latitude, wind_power, offset)
        geoJson = {'type': 'Polygon', 'coordinates': [polygon]}
        hexagons = h3.polyfill(geoJson, resolution)
        h3_index = h3.geo_to_h3(latitude, longitude, resolution)
        for hex in hexagons:
            direction = h3.h3_distance(h3_index, hex)
            opacity = 255 - direction * (80 / wind_power)
            if opacity > 0:
                trees_out.append({'opacity': opacity, 'hex': hex, 'color': color})
    toc = time.perf_counter()
    print(f"Computed wind layers in {(toc - tic) * 1000:.4f} ms")

    content = {'hex': json.dumps(trees_out, default=serialize_sets)}
    resp.body = json.dumps(content)
def counts_by_hexagon(plot_variable, df, resolution):
    """
    Use h3.geo_to_h3 to index each data point into the spatial index of the
    specified resolution, then use h3.h3_to_geo_boundary to obtain the
    geometries of those hexagons.

    Example: counts_by_hexagon(plot_variable, df, 9)
    """
    df = df[["lat", "lng", plot_variable]]
    df["hex_id"] = df.apply(
        lambda row: h3.geo_to_h3(row["lat"], row["lng"], resolution), axis=1)

    # Aggregate the plot variable to its mean value within each hexagon.
    df_aggreg = df.groupby(['hex_id'], as_index=True).agg({
        plot_variable: 'mean'
    }).rename(columns={
        plot_variable: 'value'
    }).reset_index()

    df_aggreg = df_aggreg[['hex_id', 'value']]
    df_aggreg = df_aggreg[df_aggreg['value'] != np.inf]

    df_aggreg["geometry"] = df_aggreg.hex_id.apply(
        lambda x: {
            "type": "Polygon",
            "coordinates": [h3.h3_to_geo_boundary(h=x, geo_json=True)]
        })

    return df_aggreg
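# Hedged usage sketch for counts_by_hexagon() above (assumes pandas, numpy and
# h3-py v3.x are importable and the function is in scope); "speed" is a made-up
# column name for illustration.
import numpy as np
import pandas as pd

df = pd.DataFrame({
    "lat": [61.45, 61.46, 61.47],
    "lng": [23.85, 23.86, 23.87],
    "speed": [30.0, 40.0, 50.0],
})
hex_means = counts_by_hexagon("speed", df, 9)
print(hex_means[["hex_id", "value"]])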
def lat_lng_to_h3():
    import h3
    h3_key = h3.geo_to_h3(lat=48.853, lng=2.348, resolution=8)
    print(h3_key)  # returns "881fb46625fffff"
def insertItem(data, h3index9):
    latlon_ary = data["latlon"].split(",")
    h3index8 = h3.geo_to_h3(float(latlon_ary[0]), float(latlon_ary[1]), 8)
    h3index7 = h3.geo_to_h3(float(latlon_ary[0]), float(latlon_ary[1]), 7)
    h3index6 = h3.geo_to_h3(float(latlon_ary[0]), float(latlon_ary[1]), 6)

    dtnow = datetime.now()
    time = dtnow.strftime("%Y%m%d-%H%M%S") + dtnow.strftime("%f")[0:3]

    data["h3-9"] = h3index9 + "_" + time
    data["h3-8"] = h3index8
    data["h3-7"] = h3index7
    data["h3-6"] = h3index6

    # If there is no homepage, look one up via Google
    if "homepage" not in data or not data["homepage"]:
        setSiteToData(data)

    checkIFrameEnableItem(data)
    insertItemD(data)
def h3_2set_polyfill(geometry, resolution):

    def to_h3(geometry):
        # buffering can result in MultiPolygon geometries
        if 'MultiPolygon' in geometry.geom_type:
            geometry = list(geometry.geoms)
        else:
            geometry = [geometry]
        hex_set = set()
        for p in geometry:
            p_geojson = shapely.geometry.mapping(p)
            hex_set = hex_set.union(h3.polyfill_geojson(p_geojson, resolution))
        return hex_set

    def get_result_struct(hex_i, polygon, dirty_set):
        if hex_i in dirty_set:
            hex_polygon = shapely.geometry.Polygon(
                h3.h3_to_geo_boundary(hex_i, geo_json=True))
            intersection = polygon.intersection(hex_polygon)
            if intersection.is_empty:
                # does not represent any area of the original geometry
                return None
            elif intersection.equals(hex_polygon):
                # fully contained by the original geometry
                return (hex_i, False, None)
            else:
                # partially contained by the original geometry
                return (hex_i, True, intersection.wkb)
        else:
            # fully contained by the original geometry (not in the dirty set)
            return (hex_i, False, None)

    polygon = geometry  # placeholder for when we are loading wkt/wkb in udfs

    # Get the centroid of the geometry, the index containing it, and that index's
    # geometry; compute the radius from the minimum rotated rectangle
    # (radius of a pseudo minimal enclosing circle).
    cent = polygon.centroid.xy
    centroid_hex = h3.geo_to_h3(cent[1][0], cent[0][0], resolution)
    centroid_geom = shapely.geometry.Polygon(
        h3.h3_to_geo_boundary(centroid_hex, geo_json=True))
    radius = math.sqrt(centroid_geom.minimum_rotated_rectangle.area) / 2

    # Any index that may touch the boundary is "dirty".
    dirty = polygon.boundary.buffer(radius)

    original_set = to_h3(polygon)
    dirty_set = to_h3(dirty)

    result = [
        get_result_struct(hex_i, polygon, dirty_set)
        for hex_i in list(original_set.union(dirty_set))
    ]
    result = [c for c in result if c is not None]
    return result
def test_h3_is_valid():
    assert h3.h3_is_valid('85283473fffffff')
    assert h3.h3_is_valid('850dab63fffffff')
    assert not h3.h3_is_valid('lolwut')

    # H3 0.x addresses are not considered valid
    assert not h3.h3_is_valid('5004295803a88')

    for res in range(16):
        assert h3.h3_is_valid(h3.geo_to_h3(37, -122, res))
def h3_2set_polyfill(geometry, resolution):

    # We need to account for the possibility of MultiPolygon shapes.
    def to_h3(geometry):
        if 'MultiPolygon' in geometry.geom_type:
            geometry = list(geometry.geoms)
        else:
            geometry = [geometry]
        hex_set = set()
        for p in geometry:
            p_geojson = shapely.geometry.mapping(p)
            hex_set = hex_set.union(h3.polyfill_geojson(p_geojson, resolution))
        return hex_set

    # Convenience method for converting an index to a chip of a polygon.
    def get_result_struct(hex_i, polygon, dirty_set):
        if hex_i in dirty_set:
            hex_polygon = shapely.geometry.Polygon(
                h3.h3_to_geo_boundary(hex_i, geo_json=True))
            intersection = polygon.intersection(hex_polygon)
            if intersection.is_empty:
                return None
            elif intersection.equals(hex_polygon):
                return (hex_i, False, None)
            else:
                return (hex_i, True, intersection.wkb)
        else:
            return (hex_i, False, None)

    polygon = shapely.wkb.loads(bytes(geometry))

    # Compute the buffer radius. We cannot use the hexagon side length due to
    # curvature, so we use the minimum rotated rectangle and assume it is nearly
    # square; the alternative would be to iterate through the boundary vertices
    # and take the longest side.
    cent = polygon.centroid.xy
    centroid_hex = h3.geo_to_h3(cent[1][0], cent[0][0], resolution)
    centroid_geom = shapely.geometry.Polygon(
        h3.h3_to_geo_boundary(centroid_hex, geo_json=True))
    radius = math.sqrt(centroid_geom.minimum_rotated_rectangle.area) / 2

    # Any index that may touch the boundary.
    dirty = polygon.boundary.buffer(radius)

    original_set = to_h3(polygon)
    dirty_set = to_h3(dirty)

    result = [
        get_result_struct(hex_i, polygon, dirty_set)
        for hex_i in list(original_set.union(dirty_set))
    ]
    return result
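# Hedged usage sketch for the WKB variant of h3_2set_polyfill() above (assumes
# shapely, h3-py v3.x and math are importable and the function is in scope). It
# returns (hex_id, is_chip, chip_wkb) tuples: core cells carry no chip geometry,
# boundary cells carry the clipped intersection as WKB, and None entries mark
# buffer cells that do not overlap the polygon at all. Coordinates are arbitrary.
import shapely.geometry

poly = shapely.geometry.Polygon([(2.30, 48.85), (2.36, 48.85),
                                 (2.36, 48.88), (2.30, 48.88)])
cells = [c for c in h3_2set_polyfill(poly.wkb, 9) if c is not None]
core = [c for c in cells if not c[1]]
chips = [c for c in cells if c[1]]
print(len(core), "core cells,", len(chips), "boundary chips")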
def _h3_district_corners(geojson):
    idx = {}
    for feature in geojson["features"]:
        for c in feature["geometry"]["coordinates"][0][0]:
            # resolution=9 is roughly 0.17 km end to end
            key = h3.geo_to_h3(c[1], c[0], 9)
            if key not in idx:
                idx[key] = {feature['properties']['DISTRICT']}
            else:
                idx[key].add(feature['properties']['DISTRICT'])
    # if a set holds more than 2 districts, that cell is a corner of more than 2 districts
    return {k for k, v in idx.items() if len(v) > 2}
def importLine(csvLine, fileName, forceTitle):
    try:
        data = convertCsv2Json(csvLine)
        if data is None:
            return False

        # Clear tel if it contains anything other than digits or is longer than 11 characters
        if "tel" in data and data["tel"]:
            if not data["tel"].isdecimal():
                data["tel"] = ""
            elif len(data["tel"]) > 11:
                data["tel"] = ""

        # If there is no phone number, try to obtain it from the address
        if "tel" not in data or not data["tel"]:
            setTelAndLatLonToData(data)

        # Get the latitude/longitude from the title and address
        if "latlon" not in data or not data["latlon"]:
            setLatLonToData(data)

        # Check whether the item is already registered
        latlon_ary = data["latlon"].split(",")
        h3index9 = h3.geo_to_h3(float(latlon_ary[0]), float(latlon_ary[1]), 9)
        records = selectItem(data, h3index9)
        if records is None or len(records) == 0:
            insertItem(data, h3index9)
        elif len(records) == 1:
            updateItem(records[0], data, forceTitle)
        elif not data["tel"]:
            message = "CSV IMPORTER : {0}\n Alert : MULTIPLE NO TELEPHONE\n {1}".format(
                fileName, data["title"])
            notifyToSlack(SLACK_WEBHOOK_HAMAMATSU, message)
            return False
        else:
            message = "CSV IMPORTER : {0}\n Alert : MULTIPLE RECORDS({1}) {2}\n {3}\n {4}".format(
                fileName, len(records), data["title"], records[0], records[1])
            notifyToSlack(SLACK_WEBHOOK_HAMAMATSU, message)
            return False
        return True
    except Exception as e:
        logger.exception(e)
        message = "CSV IMPORTER : {0}\n Exception : {1}".format(
            fileName, e.__class__.__name__)
        notifyToSlack(SLACK_WEBHOOK_HAMAMATSU, message)
        raise
def parse_result(r):
    data = r['data']

    lat = round(get_nested_value(data, 9, 2), 7)  # 7 digits equals a precision of 1 cm
    lng = round(get_nested_value(data, 9, 3), 7)  # 7 digits equals a precision of 1 cm

    # noinspection PyUnresolvedReferences
    h3_index = h3.geo_to_h3(lat, lng, POI_RESOLUTION)

    pb_id = get_nested_value(data, 10)

    return dict(query=r['query'],
                data=dict(location=dict(lat=lat, lng=lng),
                          h3Index=h3_index,
                          id=pb_id))
def test_to_local_ij_error():
    h = h3.geo_to_h3(0, 0, 0)

    # error if we cross a face
    nb = h3.hex_ring(h, k=2)
    with pytest.raises(H3ValueError):
        [h3.experimental_h3_to_local_ij(h, p) for p in nb]

    # should be fine if we do not cross a face
    nb = h3.hex_ring(h, k=1)
    out = {h3.experimental_h3_to_local_ij(h, p) for p in nb}
    expected = {(-1, 0), (0, -1), (0, 1), (1, 0), (1, 1)}

    assert out == expected
def calc_h3d(cls, lat, lon, res):
    """
    Calculate the h3 distilled index for the given parameters.

    :param lat: Latitude
    :type lat: float
    :param lon: Longitude
    :type lon: float
    :param res: resolution (level, depth) of the H3 index, 0-15
    :type res: int
    :returns: h3 distilled index
    :rtype: int
    """
    return h3d.h3s_to_h3d(h3.geo_to_h3(lat, lon, res))
def parse_result(r):
    data = r['data'][6]

    name = get_nested_value(data, 11)
    place_id = get_nested_value(data, 78)

    lat = round(get_nested_value(data, 9, 2), 7)  # 7 digits equals a precision of 1 cm
    lng = round(get_nested_value(data, 9, 3), 7)  # 7 digits equals a precision of 1 cm

    # noinspection PyUnresolvedReferences
    h3_index = h3.geo_to_h3(lat, lng, POI_RESOLUTION)

    address = get_nested_value(data, 2)
    timezone = get_nested_value(data, 30)
    categories = [t[0] for t in (get_nested_value(data, 76) or [])]
    opening_hours = parse_opening_hours(get_nested_value(data, 34, 1))

    permanently_closed = get_nested_value(data, 88, 0) == 'CLOSED'
    temporarily_closed = get_nested_value(data, 96, 5, 0, 2) == 'Reopen this place' and not permanently_closed

    inside_of = get_nested_value(data, 93, 0, 0, 0, 1)

    phone = get_nested_value(data, 178, 0, 3)
    website = get_nested_value(data, 7, 0)

    rating_stars = get_nested_value(data, 4, 7)
    rating_number_of_reviews = get_nested_value(data, 4, 8)
    price_level = get_nested_value(data, 4, 2)

    popularity_data = get_nested_value(data, 84, 0)
    spending_time = parse_spending_time_data(get_nested_value(data, 117, 0))

    popularity, waiting_time = None, None
    if popularity_data:
        popularity, waiting_time = parse_popularity_data(popularity_data, timezone)

    return dict(
        id=r['id'],
        data=dict(
            name=name,
            placeID=place_id,
            location=dict(lat=lat, lng=lng),
            h3Index=h3_index,
            address=address,
            timezone=timezone,
            categories=categories,
            temporarilyClosed=temporarily_closed,
            permanentlyClosed=permanently_closed,
            insideOf=inside_of,
            contact=dict(phone=phone, website=website),
            openingHours=opening_hours,
            rating=dict(stars=rating_stars, numberOfReviews=rating_number_of_reviews),
            priceLevel=len(price_level) if price_level else None,
            popularity=popularity,
            waitingTime=waiting_time,
            spendingTime=spending_time
        )
    )
def test_from_local_ij_error():
    h = h3.geo_to_h3(0, 0, 0)

    baddies = [(1, -1), (-1, 1), (-1, -1)]
    for i, j in baddies:
        with pytest.raises(H3ValueError):
            h3.experimental_local_ij_to_h3(h, i, j)

    # inverting output should give good data
    nb = h3.hex_ring(h, k=1)
    goodies = {h3.experimental_h3_to_local_ij(h, p) for p in nb}
    out = {h3.experimental_local_ij_to_h3(h, i, j) for i, j in goodies}

    assert out == nb
def __create_h3_from_lat_lon(self, lat, lon):
    """
    Get an H3 index from latitude and longitude.

    :param lat: latitude of the coordinate
    :param lon: longitude of the coordinate
    :return: the H3 index at resolution RES, or None on failure
    """
    try:
        return h3.geo_to_h3(float(lat), float(lon), RES)
    except ValueError:
        print("Wrong latitude or longitude values\n")
        return None
    except Exception as e:
        print(f"Error: {e}")
        return None
def h3_polyfill_extended(geometry, resolution):
    # get the centroid location (shapely coordinates are (lon, lat))
    (cx, cy) = geometry.centroid.coords.xy
    # get the h3 index containing the centroid (geo_to_h3 expects lat, lng)
    centroid_ind = h3.geo_to_h3(cy[0], cx[0], resolution)
    # get the centroid index geometry
    centroid_geom = shapely.geometry.Polygon(
        h3.h3_to_geo_boundary(centroid_ind))
    # find the radius of the (pseudo) minimal enclosing circle
    # (via the side of the rotated enclosing square)
    radius = math.sqrt(centroid_geom.minimum_rotated_rectangle.area) / 2
    # buffer the original geometry by the radius of the minimum enclosing circle
    geom_extended = geometry.buffer(distance=radius, resolution=1)
    geo_json_geom = shapely.geometry.mapping(geom_extended)
    # get the indices
    indices = h3.polyfill_geojson(geo_json_geom, resolution)
    return indices
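# Hedged usage sketch for h3_polyfill_extended() above (assumes shapely, h3-py v3.x
# and math are importable and the function is in scope): buffering by roughly one
# cell radius before polyfilling should yield a superset of the plain polyfill.
# The coordinates below are arbitrary.
import shapely.geometry
import h3

poly = shapely.geometry.Polygon([(2.30, 48.85), (2.36, 48.85),
                                 (2.36, 48.88), (2.30, 48.88)])
plain = h3.polyfill_geojson(shapely.geometry.mapping(poly), 9)
extended = h3_polyfill_extended(poly, 9)
print(len(plain), "<=", len(extended), set(plain).issubset(extended))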
def compute_reward_scale(self, lat: float, lng: float):
    """Generate the reward scale for a given location."""
    hex_id = h3.geo_to_h3(lat, lng, RES_MAX)
    if hex_id not in self.hex_dict[RES_MAX]:
        raise Exception("Cannot compute reward scale. Invalid starting hex.")

    current_hex = self.hex_dict[RES_MAX][hex_id]
    reward_scale = 1
    while current_hex.res >= RES_MIN:
        parent = self[h3.h3_to_parent(current_hex.hex_id, current_hex.res - 1)]
        reward_scale = reward_scale * (parent.clipped_density / parent.unclipped_density)
        current_hex = parent
    return reward_scale
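# Illustrative worked example of the loop in compute_reward_scale() above (the
# numbers are made up): walking from the finest hex up toward RES_MIN, the scale
# is the running product of each parent's clipped/unclipped density ratio.
ratios = [1.0, 0.5, 0.25]  # hypothetical clipped/unclipped ratios per parent level
reward_scale = 1
for r in ratios:
    reward_scale *= r
print(reward_scale)  # 0.125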
def load_incidents(aperture_size=None, df_freq=None):
    if df_freq is None:
        df = pd.read_pickle('resources/tdot_testing_incidents.pk')
        df_freq = df.groupby([
            'unit_segment_id', 'GPS Coordinate Latitude',
            'GPS Coordinate Longitude', 'timestamp'
        ]).size().reset_index()
        df_freq.set_axis(['segment_id', 'lat', 'lng', 'timestamp', 'count'],
                         axis=1, inplace=True)
        df_freq.loc[:, 'segment_id'] = df_freq.segment_id.apply(int)
        df_freq.loc[:, 'time'] = pd.to_datetime(df_freq['timestamp'], unit='s')
    if aperture_size is not None:
        aperture_size = int(aperture_size)
        df_freq.loc[:, 'region'] = df_freq.apply(
            lambda x: h3.geo_to_h3(x.lat, x.lng, aperture_size), 1)
    return df_freq
def filter_h3d_around(cls, lat, lon, res, k_distance=1, queryset=None):
    """
    Filter all instances with the same h3 cell, or in cells around it.

    :param lat: latitude
    :param lon: longitude
    :param res: resolution (level, depth) of the index where to find
    :param k_distance: max distance in hexagon cells from the cell containing
        the start point; 0 for the containing cell only, 1 for the containing
        cell and cells immediately around it, etc.
    :param queryset: the queryset to search in; None means cls.objects.all()
    """
    cells = h3.compact(h3.k_ring(h3.geo_to_h3(lat, lon, res), k_distance))
    d_cells = [h3d.h3s_to_h3d(c) for c in cells]
    filters = [models.Q(h3d__range=h3d.h3d_range(c)) for c in d_cells]
    if not queryset:
        queryset = cls.objects.all()
    return queryset.filter(functools.reduce(operator.or_, filters))
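# Hedged sketch of the cell-gathering step used by filter_h3d_around() above
# (assumes h3-py v3.x): k_ring collects the origin cell plus its neighbours, and
# compact merges any complete child sets into their parent before the per-cell
# range filters are OR-combined. The coordinates are arbitrary.
import h3

origin = h3.geo_to_h3(48.8566, 2.3522, 9)
cells = h3.compact(h3.k_ring(origin, 1))
print(len(cells), "cells cover the k=1 neighbourhood around", origin)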
def _lower_resolution(list_of_coords, h3_corners, reduction_factor):
    if reduction_factor > 9:
        raise Exception("max reduction factor is 9")
    result = []
    # The polygon's first and last coordinate must be the same in order to enclose
    # the area, so we always append the first coordinate here.
    result.append(list_of_coords[0])
    for coord in list_of_coords:
        if int(str(coord[0])[-1]) < reduction_factor and h3.geo_to_h3(
                coord[1], coord[0], 9) not in h3_corners:
            continue
        result.append(coord)
    # ensure that we always append the last coordinate to enclose the area
    if int(str(list_of_coords[-1][0])[-1]) < reduction_factor:
        result.append(list_of_coords[-1])
    return result
def select_reliable_fire(year):
    """Load the fire data for a single year and keep only rows with a confidence
    of at least 80. Compute the h3 hexagon in which each remaining fire occurred
    from its latitude and longitude, and return the per-hexagon counts."""
    raw_file = input_dir + "modis_" + year + "_Australia.csv"
    df_raw = pd.read_csv(raw_file, sep=',')
    df_sel = df_raw[df_raw["confidence"] >= 80].reset_index(drop=True)
    df_sel = df_sel[["latitude", "longitude"]]
    df_sel["hex_id"] = df_sel.apply(
        lambda row: h3.geo_to_h3(
            lat=row["latitude"], lng=row["longitude"], resolution=7),
        axis=1)
    print("A total of {} fire data points were counted".format(df_sel.shape[0]))

    df_count = pd.DataFrame(df_sel["hex_id"].value_counts())
    df_count = df_count.reset_index()
    fire_column = "fire_" + year
    df_count.columns = ["hex_id", fire_column]
    return df_count
def h3_tile_by_dim(lon, lat, bdim=BASE_DIM, asc=True, more=False):
    osmproj = pyproj.Proj("epsg:3857")
    resolutions = range(H3_MAX_ZOOM) if asc else reversed(range(H3_MAX_ZOOM))
    for resolution in resolutions:
        tile = h3.geo_to_h3(lat, lon, resolution)
        polygon = h3.h3_to_geo_boundary(tile)
        x1, y1 = osmproj(*polygon[0])
        x2, y2 = osmproj(*polygon[1])
        dist = Point(x1, y1).distance(Point(x2, y2))
        if (asc and dist < bdim) or (not asc and dist > bdim):
            break
    return tile if not more else (tile, resolution)
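# Hedged usage sketch for h3_tile_by_dim() above (assumes the module-level BASE_DIM
# and H3_MAX_ZOOM constants plus pyproj, shapely.geometry.Point and h3-py v3.x are
# available): ask for the first tile, scanning from coarse to fine, whose projected
# edge length drops below roughly 1 km. The coordinates are arbitrary.
tile, resolution = h3_tile_by_dim(11.2558, 43.7696, bdim=1000, asc=True, more=True)
print(tile, resolution)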