def test_validation():
    """Every API entry point must reject a malformed cell address."""
    bad = '8a28308280fffff'  # invalid hex

    failing_calls = [
        lambda: h3.h3_get_base_cell(bad),
        lambda: h3.h3_get_resolution(bad),
        lambda: h3.h3_to_parent(bad, 9),
        lambda: h3.h3_distance(bad, bad),
        lambda: h3.k_ring(bad, 1),
        lambda: h3.hex_ring(bad, 1),
        lambda: h3.h3_to_children(bad, 11),
        lambda: h3.compact({bad}),
        lambda: h3.uncompact({bad}, 10),
    ]
    for call in failing_calls:
        with pytest.raises(H3CellError):
            call()
def test_parent():
    """h3_to_parent returns known ancestors; finer resolutions raise."""
    child = '8928308280fffff'
    # expected parent at each coarser resolution; res 9 is the cell itself
    expected = {
        7: '872830828ffffff',
        8: '8828308281fffff',
        9: child,
    }
    for res, parent in expected.items():
        assert h3.h3_to_parent(child, res) == parent
    # requesting a resolution finer than the cell's own is an error
    with pytest.raises(H3ResolutionError):
        h3.h3_to_parent(child, 10)
def get_reward_scale(self, hex_densities, target_hex_unclipped, whitelist_hexs, normalize=False):
    """Compute a reward scale for every hotspot in self.hotspots.

    Each hotspot starts with the clipped/unclipped ratio of its res-R hex,
    then ascends toward res 0 multiplying in the clipped/children-sum ratio
    at each level, stopping early at a whitelisted ancestor. Hotspots whose
    ascent never reaches a whitelisted hex are scaled to 0.

    :param hex_densities: dict of densities of each occupied hex at all levels
    :param target_hex_unclipped: dict of res R hex as keys and raw count of
        interactive hexs as values. this could be regenerated pretty easily
        if desired (O(|hotspots|)) if is_interactive is O(1) and fast
    :param whitelist_hexs: collection of whitelisted hexs; ascent stops here
    :param normalize: if True, rescale so the mean of all scales is 1
    :return: dict mapping hotspot address -> reward scale
    """
    reward_scales = dict()
    for h in self.hotspots:
        # initialize scale to clipped/unclipped count for the target res hex
        # (parent computed once and reused — it was previously recomputed)
        hspot_hex = h3.h3_to_parent(h['location'], self.chain_vars['R'])
        scale = hex_densities[hspot_hex] / target_hex_unclipped[hspot_hex]
        for parent_res in range(self.chain_vars['R'] - 1, -1, -1):
            if hspot_hex in whitelist_hexs:
                break
            parent = h3.h3_to_parent(h['location'], parent_res)
            children_sum = 0
            for child in h3.h3_to_children(parent, parent_res + 1):
                children_sum += hex_densities.get(child, 0)
            # multiply scale by ratio of clipped values
            scale *= hex_densities[parent] / children_sum
            hspot_hex = parent
        # if the ascent never reached a whitelisted hex, this hotspot
        # gets 0 rewards (previous comment here stated the opposite)
        if hspot_hex not in whitelist_hexs:
            scale = 0
        reward_scales[h['address']] = scale
    if normalize and reward_scales:
        # set the mean of all scales to 1 for ease of understanding;
        # the empty-dict guard avoids a ZeroDivisionError on no hotspots
        scale_avg = sum(reward_scales.values()) / len(reward_scales)
        for k in reward_scales:
            reward_scales[k] /= scale_avg
    return reward_scales
def get_hex_densities(self):
    """Build clipped densities for every occupied hex from res R up to 0.

    Counts hotspots per res-R hex, clips those counts via __clip_hex__,
    then repeatedly rolls clipped child densities up one resolution and
    clips again until resolution 0.

    :return: tuple (hex_densities, all_info) where hex_densities maps hex
        id -> clipped density and all_info maps hex id -> dict with
        'unclipped' (pre-clip child sum) and 'limit' (clip limit, -1 until
        set by __clip_hex__).
    """
    # first build densities at target resolution R
    target_hex_unclipped = dict()
    all_info = dict()
    for h in self.hotspots:
        # renamed from `hex` — avoid shadowing the builtin
        target_hex = h3.h3_to_parent(h['location'], self.chain_vars['R'])
        target_hex_unclipped.setdefault(target_hex, 0)
        all_info.setdefault(target_hex, dict(unclipped=0, limit=-1))
        target_hex_unclipped[target_hex] += 1
        all_info[target_hex]['unclipped'] += 1

    hex_densities = dict()
    # clip targets so we have valid children in hex densities to begin
    # ascending through the list
    for hx in target_hex_unclipped:
        hex_densities[hx], all_info[hx]['limit'] = self.__clip_hex__(
            hx, target_hex_unclipped, return_clip=True)
    print(f"{len(self.hotspots)} interactive hotspots")
    print(
        f"found {len(hex_densities):4d} occupied hexs at resolution {self.chain_vars['R']}"
    )

    # hex_densities now holds appropriately clipped target hexs; ascend
    # from resolution R-1 to 0
    occupied_children = set(hex_densities)
    for res in range(self.chain_vars['R'] - 1, -1, -1):
        # iterate through children getting unclipped density for each hex
        # at this res
        occupied_hexs = set()
        for child_hex in occupied_children:
            parent_hex = h3.h3_to_parent(child_hex, res)
            occupied_hexs.add(parent_hex)
            hex_densities.setdefault(parent_hex, 0)
            all_info.setdefault(parent_hex, dict(unclipped=0, limit=-1))
            hex_densities[parent_hex] += hex_densities[child_hex]
            all_info[parent_hex]['unclipped'] += hex_densities[child_hex]
        print(
            f"found {len(occupied_hexs):4d} occupied hexs at resolution {res}"
        )
        # clip hexs at this res as appropriate
        for parent_hex in occupied_hexs:
            hex_densities[parent_hex], all_info[parent_hex]['limit'] = self.__clip_hex__(
                parent_hex, hex_densities, return_clip=True)
        occupied_children = occupied_hexs
    return hex_densities, all_info
def main():
    """Compute hex densities and hotspot reward scales, writing both to CSV.

    Reads chain parameters from chain_vars.json, builds the clipped hex
    density map, then writes hexDensities_RewardScale_R<R>.csv and
    hotspot_RewardScale_R<R>.csv.
    """
    with open('chain_vars.json', 'r') as fd:
        chain_vars = json.load(fd)

    # for now set all level 0 hexs with a hotspot as whitelist
    whitelist_hexs = set()
    for h in load_hotspots():
        if h['location']:
            whitelist_hexs.add(h3.h3_to_parent(h['location'], 0))

    RS = RewardScale(chain_vars)
    hex_densities, all_hex_info = RS.get_hex_densities()

    with open(f'hexDensities_RewardScale_R{chain_vars["R"]}.csv', 'w',
              newline='') as csvfile:
        hex_writer = csv.writer(csvfile,
                                delimiter=',',
                                quotechar='"',
                                quoting=csv.QUOTE_MINIMAL)
        hex_writer.writerow(
            ['hex', 'resolution', 'limit', 'clipped', 'child_sum', 'ratio'])
        for h in hex_densities:
            res = h3.h3_get_resolution(h)
            # renamed from `sum` — it shadowed the builtin
            unclipped_sum = all_hex_info[h]['unclipped']
            ratio = 0
            if unclipped_sum:
                ratio = hex_densities[h] / unclipped_sum
            hex_writer.writerow([
                h, res, all_hex_info[h]['limit'], hex_densities[h],
                unclipped_sum, ratio
            ])

    target_hex_unclipped = dict()
    for h in all_hex_info:
        target_hex_unclipped[h] = all_hex_info[h]['unclipped']
    hotspot_scales = RS.get_reward_scale(hex_densities,
                                         target_hex_unclipped,
                                         whitelist_hexs=whitelist_hexs,
                                         normalize=True)
    # diagnostic total; builtin sum usable now that it is not shadowed
    total_scale = sum(hotspot_scales.values())

    with open(f'hotspot_RewardScale_R{chain_vars["R"]}.csv', 'w',
              newline='') as csvfile:
        hex_writer = csv.writer(csvfile,
                                delimiter=',',
                                quotechar='"',
                                quoting=csv.QUOTE_MINIMAL)
        hex_writer.writerow(['address', 'reward_scale'])
        for h in hotspot_scales:
            hex_writer.writerow([h, hotspot_scales[h]])
def generate_parents(self, resolution=RES_MAX, generation_id=0):
    """Takes all max res hexagons, and generates their parent hexagons.

    REQUIRES:
        -> Requires a valid resolution. Starting resolution is RES_MAX by
           default.
    EFFECTS:
        -> Generates new hexagons at all resolutions until RES_MIN.
        -> Sets the clipped and unclipped densities of each hex added.

    Recurses one resolution at a time: the loop below fills in parents at
    resolution-1, then the tail call repeats until RES_MIN is reached.
    generation_id distinguishes hexes built in this pass from stale entries
    left over from a previous pass.
    """
    parent_res = resolution - 1
    for hex in self.hex_dict[resolution].values():
        parent = h3.h3_to_parent(hex.hex_id, parent_res)
        # If the parent has already been generated in the hex_dict, then there
        # is no need to regenerate the hex meta data.
        if parent in self.hex_dict[parent_res]\
                and self.hex_dict[parent_res][parent].generation_id == generation_id:
            self.hex_dict[parent_res][parent].residents += hex.residents
            continue
        # Fresh parent for this generation: create it, tag it, and seed its
        # resident count from the current child.
        self.hex_dict[parent_res][parent] = Hexagon(parent)
        self.hex_dict[parent_res][parent].generation_id = generation_id
        self.hex_dict[parent_res][parent].residents += hex.residents
        # Generate the unclipped density of the parent hex as the sum of its
        # children's *clipped* densities.
        unclipped = 0
        for child in self.children(self.hex_dict[parent_res][parent]):
            unclipped += child.clipped_density
        self.hex_dict[parent_res][parent].unclipped_density = unclipped
    # once all parents are generated along with their unclipped densities,
    # we can then generate their clipped densities.
    for parent in self.hex_dict[parent_res].values():
        neighbors = self.neighbors(parent)
        occupied_count = 0
        for neighbor in neighbors:
            # NOTE(review): HIP_RES_META[res] appears to be a per-resolution
            # tuple of (N, density_tgt, density_max) — confirm against its
            # definition; index [1] is used as the occupancy threshold here.
            if neighbor.unclipped_density >= HIP_RES_META[
                    parent.res][1]:
                occupied_count += 1
        parent.occupied_count = occupied_count
        # Density limit scales with how many neighbors are occupied, capped
        # by the per-resolution maximum.
        parent.hex_density_limit = min(
            HIP_RES_META[parent.res][1] *
            max(occupied_count - HIP_RES_META[parent.res][0] + 1, 1),
            HIP_RES_META[parent.res][2])
        parent.clipped_density = min(parent.unclipped_density,
                                     parent.hex_density_limit)
    # Recurse until the coarsest tracked resolution has been generated.
    if resolution > RES_MIN:
        self.generate_parents(resolution - 1, generation_id)
def compute_reward_scale(self, lat: float, lng: float):
    """Generate the reward scale for a given location.

    Walks from the location's RES_MAX hex up the parent chain, multiplying
    in each parent's clipped/unclipped density ratio.

    :param lat: latitude of the location, degrees
    :param lng: longitude of the location, degrees
    :return: product of (clipped_density / unclipped_density) over the
        ancestors visited
    :raises Exception: if the location's RES_MAX hex is not in hex_dict
    """
    hex_id = h3.geo_to_h3(lat, lng, RES_MAX)
    if hex_id not in self.hex_dict[RES_MAX]:
        raise Exception(
            "Cannot compute reward scale. Invalid starting hex.")
    current_hex = self.hex_dict[RES_MAX][hex_id]
    reward_scale = 1
    while current_hex.res >= RES_MIN:
        # NOTE(review): when current_hex.res == RES_MIN this looks up a
        # parent at RES_MIN - 1 — confirm self[...] holds hexes below
        # RES_MIN, otherwise the condition may need to be `> RES_MIN`.
        # Also assumes parent.unclipped_density is nonzero — TODO confirm.
        parent = self[h3.h3_to_parent(current_hex.hex_id,
                                      current_hex.res - 1)]
        reward_scale = reward_scale * (parent.clipped_density /
                                       parent.unclipped_density)
        current_hex = parent
    return reward_scale
def sum_by_hexagon(df, resolution, pol, fr, to, vessel_type=None, gt=None):
    """
    Aggregate emissions per H3 hexagon at the requested resolution.

    Use h3.geo_to_h3 to index each data point into the spatial index of the
    specified resolution.
    Use h3.h3_to_geo_boundary to obtain the geometries of these hexagons

    Ex counts_by_hexagon(data, 8)

    :param df: DataFrame with columns dt_pos_utc, StandardVesselType,
        GrossTonnage, res_8, co2_t, ch4_t
    :param resolution: target H3 resolution; rows are pre-indexed at res 8
    :param pol: unused in this function; kept for interface compatibility
    :param fr: start of the time window (inclusive)
    :param to: end of the time window (inclusive)
    :param vessel_type: optional list of vessel types to keep (None/empty
        keeps all) — was a mutable list default, now a None sentinel with
        identical falsy semantics
    :param gt: optional [min, max] gross tonnage bounds (None/empty skips)
    :return: aggregated DataFrame with hex_id, co2_t, ch4_t and a GeoJSON
        Polygon geometry column; the empty filtered frame when no rows
        fall inside the time window
    """
    if vessel_type:
        df_aggreg = df[((df.dt_pos_utc.between(fr, to)) &
                        (df.StandardVesselType.isin(vessel_type)))]
    else:
        df_aggreg = df[df.dt_pos_utc.between(fr, to)]

    if df_aggreg.shape[0] > 0:
        if gt:
            df_aggreg = df_aggreg[df_aggreg.GrossTonnage.between(gt[0], gt[1])]
        if resolution == 8:
            # rows are already indexed at res 8 — aggregate directly
            df_aggreg = df_aggreg.groupby(by="res_8").agg({
                "co2_t": sum,
                "ch4_t": sum
            }).reset_index()
        else:
            # re-index each row to the coarser resolution, then aggregate
            df_aggreg = df_aggreg.assign(new_res=df_aggreg.res_8.apply(
                lambda x: h3.h3_to_parent(x, resolution)))
            df_aggreg = df_aggreg.groupby(by="new_res").agg({
                "co2_t": sum,
                "ch4_t": sum
            }).reset_index()
        df_aggreg.columns = ["hex_id", "co2_t", "ch4_t"]
        df_aggreg["geometry"] = df_aggreg.hex_id.apply(
            lambda x: {
                "type": "Polygon",
                "coordinates": [h3.h3_to_geo_boundary(x, geo_json=True)]
            })
        return df_aggreg
    else:
        return df_aggreg
def test_parent():
    """Check known parents of a res-9 cell at coarser resolutions."""
    cell = '8928308280fffff'
    assert h3.h3_to_parent(cell, 7) == '872830828ffffff'
    assert h3.h3_to_parent(cell, 8) == '8828308281fffff'
    # todo: this should probably return None, eh?
    assert h3.h3_to_parent(cell, 10) == '0'
def plot_hotspot_probs(hprobs,
                       lat=None,
                       lng=None,
                       R=8,
                       geo_range=1.0,
                       outputfile='hotspot_probs.html',
                       bbox=None):
    """
    Draw hotspot probabilities as colored markers on a folium map and
    outline the containing hexs at resolutions R, R-1 and R-2.

    :param hprobs: list of hotspot probabilities; each entry is read as a
        dict with 'addr' and 'odds' keys (the color scale is centered at
        odds == 1.0)
    :param lat: map center latitude (ignored when bbox is given)
    :param lng: map center longitude (ignored when bbox is given)
    :param R: resolution of the smallest (target) hexs drawn
    :param geo_range: half-width in degrees of the default bbox
    :param outputfile: HTML file the folium map is written to
    :param bbox: lat upper left, long upper left, lat upper right, long upper right
    :return: None; saves the map to outputfile
    """
    hs = load_hotspots()
    h_by_addr = dict()
    for h in hs:
        h_by_addr[h['address']] = h
    # derive the bbox from center + range, or the center from the bbox
    if not bbox:
        bbox = [
            lat + geo_range, lng - geo_range, lat - geo_range, lng + geo_range
        ]
    else:
        lat = (bbox[0] + bbox[2]) / 2
        lng = (bbox[1] + bbox[3]) / 2
    my_map = folium.Map(location=[lat, lng], zoom_start=6)
    vals = [x['odds'] for x in hprobs]
    # diverging colormap centered at odds == 1.0; upper bound pinned to 2
    cnorm = mpl.colors.TwoSlopeNorm(vcenter=1.0, vmin=np.min(vals),
                                    vmax=2)  # np.max(vals))
    colmap = cm.ScalarMappable(norm=cnorm, cmap='RdYlGn')
    idc = np.argsort(vals)
    hexs = set([])  # store hex's where odds < 1.0 for displaying
    hex_parent = set([])
    hex_gparent = set([])
    # iterate from highest odds to lowest so low-odds markers render last
    for idx in idc[::-1]:
        hp = hprobs[idx]
        # small random jitter so co-located hotspots don't fully overlap
        hlat = h_by_addr[hp['addr']]['lat'] + (random.random() - 0.5) * .0004
        hlng = h_by_addr[hp['addr']]['lng'] + (random.random() - 0.5) * .0004
        # skip hotspots outside the bounding box
        if not (bbox[2] < hlat < bbox[0] and bbox[1] < hlng < bbox[3]):
            continue
        # map the odds through the colormap to an RGB hex string
        color = "#" + ''.join(
            [f"{x:02x}" for x in colmap.to_rgba(hp['odds'], bytes=True)[:3]])
        folium.CircleMarker(
            (hlat, hlng),
            color='black',
            fill_color=color,
            popup=f"{h_by_addr[hp['addr']]['name']} [{hp['odds']:.2f}]",
            fill=True,
            fill_opacity=0.9,
            number_of_sides=8,
            radius=11,
            opacity=.35,
            weight=2,
            z_index_offset=2 - int(hp['odds'])).add_to(my_map)
        # collect the hexs containing this hotspot at three resolutions
        hexs.add(h3.h3_to_parent(h_by_addr[hp['addr']]['location'], R))
        hex_parent.add(
            h3.h3_to_parent(h_by_addr[hp['addr']]['location'], R - 1))
        hex_gparent.add(
            h3.h3_to_parent(h_by_addr[hp['addr']]['location'], R - 2))
    print(f"drawing {len(hexs)} target hexs")
    for hex in hexs:
        hex_points = list(h3.h3_to_geo_boundary(hex, False))
        hex_points.append(hex_points[0])  # close the ring
        folium.PolyLine(hex_points, weight=1.5, color='black',
                        opacity=.45).add_to(my_map)
    print(f"drawing {len(hex_parent)} parent hexs")
    for hex in hex_parent:
        hex_points = list(h3.h3_to_geo_boundary(hex, False))
        hex_points.append(hex_points[0])
        folium.PolyLine(hex_points, weight=1.5, opacity=.65).add_to(my_map)
    print(f"drawing {len(hex_gparent)} grandparent hexs")
    for hex in hex_gparent:
        hex_points = list(h3.h3_to_geo_boundary(hex, False))
        hex_points.append(hex_points[0])
        folium.PolyLine(hex_points, weight=1.5, color='white',
                        opacity=.65).add_to(my_map)
    my_map.save(outputfile)
def map_hotspot_scale(hscale,
                      lat=None,
                      lng=None,
                      R=8,
                      geo_range=1.0,
                      outputfile='hotspot_probs.html',
                      bbox=None):
    """
    Draw hotspot reward scales as colored markers on a folium map and
    outline the containing hexs at resolutions R through R-3.

    :param hscale: list of hotspot probabilities; each entry is read as a
        dict with 'addr' and 'odds' keys (the color scale is centered at
        the average scale)
    :param lat: map center latitude (ignored when bbox is given)
    :param lng: map center longitude (ignored when bbox is given)
    :param R: resolution of the smallest (target) hexs drawn
    :param geo_range: half-width in degrees of the default bbox
    :param outputfile: HTML file the folium map is written to
    :param bbox: lat upper left, long upper left, lat upper right, long upper right
    :return: None; saves the map to outputfile
    """
    hs = load_hotspots()
    h_by_addr = dict()
    for h in hs:
        h_by_addr[h['address']] = h
    # derive the bbox from center + range, or the center from the bbox
    if not bbox:
        bbox = [
            lat + geo_range, lng - geo_range, lat - geo_range, lng + geo_range
        ]
    else:
        lat = (bbox[0] + bbox[2]) / 2
        lng = (bbox[1] + bbox[3]) / 2
    # SECURITY(review): hardcoded Mapbox access token checked into source.
    # Currently unused (custom tiles are commented out below); move it to
    # configuration before re-enabling.
    tiles = 'http://{s}.tiles.mapbox.com/v4/wtgeographer.2fb7fc73/{z}/{x}/{y}.png?access_token=pk.eyJ1IjoiY2Fybml2ZXJvdXMxOSIsImEiOiJja2U5Y3RyeGsxejd1MnBxZ2RiZXUxNHE2In0.S_Ql9KARjRdzgh1ZaJ-_Hw'
    my_map = folium.Map(
        location=[lat, lng],
        zoom_start=6,
        #tiles=tiles,
        #API_key='pk.eyJ1IjoiY2Fybml2ZXJvdXMxOSIsImEiOiJja2U5Y3RyeGsxejd1MnBxZ2RiZXUxNHE2In0.S_Ql9KARjRdzgh1ZaJ-_Hw'
        #attr='Mapbox'
    )
    vals = [x['odds'] for x in hscale]
    avg = np.mean(vals)
    #vals = np.array(vals)/avg
    print(f"{np.max(vals)}")
    print(f"average scaling factor = {avg}")
    # diverging colormap centered at the average scale
    cnorm = mpl.colors.TwoSlopeNorm(vcenter=avg,
                                    vmin=np.min(vals),
                                    vmax=np.max(vals) * 1.2)
    colmap = cm.ScalarMappable(norm=cnorm, cmap='RdYlGn')
    idc = np.argsort(vals)
    hexs = set([])  # store hex's where odds < 1.0 for displaying
    hex_parent = set([])
    hex_gparent = set([])
    hex_ggparent = set([])
    # iterate from highest scale to lowest so low scales render last
    for idx in idc[::-1]:
        hp = hscale[idx]
        hlat = h_by_addr[hp['addr']]['lat']  #+ (random.random()-0.5)*.0000
        hlng = h_by_addr[hp['addr']]['lng']  #+ (random.random()-0.5)*.0000
        # skip hotspots outside the bounding box
        if not (bbox[2] < hlat < bbox[0] and bbox[1] < hlng < bbox[3]):
            continue
        color = "#" + ''.join(
            [f"{x:02x}" for x in colmap.to_rgba(hp['odds'], bytes=True)[:3]])
        folium.CircleMarker(
            (hlat, hlng),
            color='black',
            fill_color=color,
            popup=f"{h_by_addr[hp['addr']]['name']} [{hp['odds']:.2f}]",
            fill=True,
            fill_opacity=0.9,
            number_of_sides=8,
            radius=11,
            opacity=.35,
            weight=2,
            z_index_offset=2 - int(hp['odds'])).add_to(my_map)
        # collect the hexs containing this hotspot at four resolutions
        hexs.add(h3.h3_to_parent(h_by_addr[hp['addr']]['location'], R))
        hex_parent.add(
            h3.h3_to_parent(h_by_addr[hp['addr']]['location'], R - 1))
        hex_gparent.add(
            h3.h3_to_parent(h_by_addr[hp['addr']]['location'], R - 2))
        hex_ggparent.add(
            h3.h3_to_parent(h_by_addr[hp['addr']]['location'], R - 3))
    print(f"drawing {len(hexs)} target hexs")
    for hx in hexs:
        hex_points = list(h3.h3_to_geo_boundary(hx, False))
        hex_points.append(hex_points[0])  # close the ring
        folium.PolyLine(hex_points, weight=1.5, color='black',
                        opacity=.45).add_to(my_map)
        # removed a stray `folium.Polygon(hex_points)` here — it was never
        # added to the map, so it had no effect
    print(f"drawing {len(hex_parent)} parent hexs")
    for hx in hex_parent:
        hex_points = list(h3.h3_to_geo_boundary(hx, False))
        hex_points.append(hex_points[0])
        folium.PolyLine(hex_points, weight=1.5, opacity=.65).add_to(my_map)
    print(f"drawing {len(hex_gparent)} grandparent hexs")
    for hx in hex_gparent:
        hex_points = list(h3.h3_to_geo_boundary(hx, False))
        hex_points.append(hex_points[0])
        folium.PolyLine(hex_points, weight=1.5, color='white',
                        opacity=.65).add_to(my_map)
    # BUG FIX: previously printed len(hex_gparent) for this count
    print(f"drawing {len(hex_ggparent)} great grandparent hexs")
    for hx in hex_ggparent:
        hex_points = list(h3.h3_to_geo_boundary(hx, False))
        hex_points.append(hex_points[0])
        folium.PolyLine(hex_points, weight=1.5, color='pink',
                        opacity=.65).add_to(my_map)
    my_map.save(outputfile)
def test_h3_to_parent():
    """A res-9 cell maps to its known res-8 parent."""
    child = '89283082813ffff'
    expected_parent = '8828308281fffff'
    assert h3.h3_to_parent(child, 8) == expected_parent