def test_validation():
    """Every API entry point must reject a malformed cell address."""
    bad_cell = '8a28308280fffff'  # invalid hex

    failing_calls = [
        lambda: h3.h3_get_base_cell(bad_cell),
        lambda: h3.h3_get_resolution(bad_cell),
        lambda: h3.h3_to_parent(bad_cell, 9),
        lambda: h3.h3_distance(bad_cell, bad_cell),
        lambda: h3.k_ring(bad_cell, 1),
        lambda: h3.hex_ring(bad_cell, 1),
        lambda: h3.h3_to_children(bad_cell, 11),
        lambda: h3.compact({bad_cell}),
        lambda: h3.uncompact({bad_cell}, 10),
    ]

    for call in failing_calls:
        with pytest.raises(H3CellError):
            call()
def test8():
    """h3_is_valid separates valid from invalid cells; other calls raise."""
    assert h3.h3_is_valid('89283082803ffff')
    assert not h3.h3_is_valid('abc')

    # Superficially plausible address that is nonetheless invalid.
    bogus = '8a28308280fffff'
    assert not h3.h3_is_valid(bogus)

    # Other methods should validate and raise an exception on bad input.
    with pytest.raises(H3CellError):
        h3.h3_get_resolution(bogus)
def test_get_res0_indexes():
    """The 122 resolution-0 cells are valid, res 0, and include pentagons."""
    cells = h3.get_res0_indexes()
    assert len(cells) == 122

    # The resolution-0 pentagons form a strict subset of the base cells.
    assert h3.get_pentagon_indexes(0) < cells

    # Every base cell is a valid H3 index at resolution 0.
    for cell in cells:
        assert h3.h3_is_valid(cell)
        assert h3.h3_get_resolution(cell) == 0

    # Spot-check a few known concrete base cells.
    expected = {
        '8001fffffffffff',
        '8003fffffffffff',
        '8005fffffffffff',
    }
    assert expected < cells
def __setitem__(self, hex_id, hex):
    """Store *hex* under *hex_id*, bucketed by the cell's resolution.

    Raises:
        TypeError: if *hex* is not a Hexagon instance.
    """
    # NOTE: parameter name `hex` shadows the builtin; kept for interface
    # compatibility with existing keyword callers.
    # Validate the value before calling h3 so a bad assignment fails fast
    # and leaves hex_dict untouched.
    if not isinstance(hex, Hexagon):
        # Bug fix: raise TypeError (a subclass of Exception, so existing
        # `except Exception` handlers still work) instead of bare Exception.
        raise TypeError(
            "Can only set hex_dict items with a Hexagon object.")
    res = h3.h3_get_resolution(hex_id)
    self.hex_dict[res][hex_id] = hex
def __init__(self, hex_id):
    """Hexagon constructor."""
    self.hex_id = hex_id

    # Resolution metadata for this cell.
    self.res = h3.h3_get_resolution(hex_id)

    # Hex density counters.
    self.raw_density = 0
    self.clipped_density = 0
    self.unclipped_density = 0

    # Reward-scale parameters, looked up once by resolution.
    meta = HIP_RES_META[self.res]
    self.N = meta[0]
    self.density_tgt = meta[1]
    self.density_max = meta[2]

    # Additional algorithm bookkeeping.
    self.occupied_count = 0
    self.hex_density_limit = self.density_max

    # Generation id used when building metadata about a hex.
    self.generation_id = -1
    self.residents = []
def __clip_hex__(self, hex, hex_densities, return_clip=False):
    """
    :param hex: hex string to evaluate
    :param hex_densities: dictionary of all hex densities, at least at this
        resolution
    :param return_clip: when True, also return the raw clip ceiling
    :return: this hex density clipped based on neighbors
    """
    res_key = str(h3.h3_get_resolution(hex))
    # Hoist the per-resolution variables out of the loop.
    res_vars = self.chain_vars['res_vars'][res_key]

    # Count immediate neighbors (distance 1) at or above the density target.
    at_tgt_count = sum(
        1 for n in h3.hex_range(hex, 1)
        if hex_densities.get(n, 0) >= res_vars['density_tgt'])

    # The ceiling scales with qualifying neighbors, capped at density_max.
    clip = min(
        res_vars['density_max'],
        res_vars['density_tgt'] * max(1, at_tgt_count - res_vars['N'] + 1))

    val = min(clip, hex_densities[hex])
    return (val, clip) if return_clip else val
def region_prior_2(region, time):
    """Laplace-smoothed prior of an incident in *region* during *time*'s hour.

    NOTE(review): reassigns the module-level df_freq, permanently dropping
    rows at/after *time* — subsequent calls see the already-filtered frame.
    """
    global df_freq
    # Keep only observations strictly before the query time.
    df_freq = df_freq[df_freq.time < time]
    # region appears to carry a 2-character prefix before the H3 cell id —
    # TODO confirm against the caller.
    resolution = h3.h3_get_resolution(region[2:])
    incident_df = load_incidents(resolution, df_freq)
    # Denominator: total incidents plus one pseudo-count per (region, hour)
    # pair — 24 hours per unique region (additive smoothing).
    lower = len(incident_df) + len(np.unique(incident_df.region)) * 24
    # Numerator: incidents matching this region and hour-of-day, plus 1.
    prior = (np.sum((incident_df.region == region[2:])
                    & (incident_df.time.dt.hour == time.hour)) + 1) / lower
    return prior
def region_prior(region, time):
    """Laplace-smoothed prior of an incident in *region* during *time*'s hour,
    using only historical data before a fixed cutoff (2019-10-01).

    NOTE(review): reassigns the module-level df_freq, so the cutoff filter
    persists across calls.
    """
    global df_freq
    # Fixed training cutoff (presumably to avoid look-ahead); contrast with
    # region_prior_2, which filters by the query time instead.
    df_freq = df_freq[df_freq.time < '2019-10-01']
    # region appears to carry a 2-character prefix before the H3 cell id —
    # TODO confirm against the caller.
    resolution = h3.h3_get_resolution(region[2:])
    incident_df = load_incidents(resolution, df_freq)
    # Denominator: total incidents plus one pseudo-count per (region, hour)
    # pair — 24 hours per unique region (additive smoothing).
    lower = len(incident_df) + len(np.unique(incident_df.region)) * 24
    # Numerator: incidents matching this region and hour-of-day, plus 1.
    prior = (np.sum((incident_df.region == region[2:])
                    & (incident_df.time.dt.hour == time.hour)) + 1) / lower
    return prior
def main():
    """Compute hex densities and hotspot reward scales; write both to CSV."""
    with open('chain_vars.json', 'r') as fd:
        chain_vars = json.load(fd)

    # For now, whitelist every resolution-0 hex that contains a hotspot.
    whitelist_hexs = set()
    for h in load_hotspots():
        if h['location']:
            whitelist_hexs.add(h3.h3_to_parent(h['location'], 0))

    RS = RewardScale(chain_vars)
    hex_densities, all_hex_info = RS.get_hex_densities()

    with open(f'hexDensities_RewardScale_R{chain_vars["R"]}.csv', 'w',
              newline='') as csvfile:
        hex_writer = csv.writer(csvfile, delimiter=',', quotechar='"',
                                quoting=csv.QUOTE_MINIMAL)
        hex_writer.writerow(
            ['hex', 'resolution', 'limit', 'clipped', 'child_sum', 'ratio'])
        for h in hex_densities:
            res = h3.h3_get_resolution(h)
            # Bug fix: renamed local from `sum` to `child_sum` — it was
            # shadowing the builtin sum() for the rest of the loop body.
            child_sum = all_hex_info[h]['unclipped']
            ratio = 0
            if child_sum:
                ratio = hex_densities[h] / child_sum
            hex_writer.writerow([
                h, res, all_hex_info[h]['limit'], hex_densities[h],
                child_sum, ratio
            ])

    target_hex_unclipped = dict()
    for h in all_hex_info:
        target_hex_unclipped[h] = all_hex_info[h]['unclipped']

    hotspot_scales = RS.get_reward_scale(hex_densities,
                                         target_hex_unclipped,
                                         whitelist_hexs=whitelist_hexs,
                                         normalize=True)

    # NOTE(review): total_scale is accumulated but never used afterwards —
    # kept for parity; consider logging it or removing the loop.
    total_scale = 0
    for v in hotspot_scales.values():
        total_scale += v

    with open(f'hotspot_RewardScale_R{chain_vars["R"]}.csv', 'w',
              newline='') as csvfile:
        hex_writer = csv.writer(csvfile, delimiter=',', quotechar='"',
                                quoting=csv.QUOTE_MINIMAL)
        hex_writer.writerow(['address', 'reward_scale'])
        for h in hotspot_scales:
            hex_writer.writerow([h, hotspot_scales[h]])
def choropleth_map(ghg, df_aggreg, layout_in, fill_opacity=0.5):
    """Create a plotly choropleth map from aggregated hex data.

    :param ghg: gas to plot: "co2" or "ch4"
    :param df_aggreg: aggregated dataframe; NOTE: renamed IN PLACE so the
        chosen gas column becomes "value"
    :param layout_in: plotly layout applied to the resulting figure
    :param fill_opacity: hexagon fill opacity
    :return: plotly Figure
    :raises ValueError: if ghg is neither "co2" nor "ch4"
    """
    if ghg == "co2":
        ghg = "co2_t"
    elif ghg == "ch4":
        ghg = "ch4_t"
    else:
        # Bug fix: the original constructed ValueError(...) without raising
        # it, so bad input fell through to a confusing KeyError later.
        raise ValueError("Enter ch4 or co2")

    df_aggreg.rename(columns={ghg: "value"}, inplace=True)

    # (Removed unused locals min_value/max_value/m/res — the plotly
    # colorscale below does not use them.)

    # Create geojson data from the dataframe.
    geojson_data = json.loads(hexagons_dataframe_to_geojson(df_hex=df_aggreg))

    # Plot on the map.
    initial_map = go.Choroplethmapbox(
        geojson=geojson_data,
        locations=df_aggreg.hex_id.tolist(),
        z=df_aggreg["value"].round(2).tolist(),
        colorscale="balance",
        marker_opacity=fill_opacity,
        marker_line_width=1,
        colorbar=dict(thickness=20, ticklen=3, title="tonnes"),
        hovertemplate='%{z:,.2f}<extra></extra>')
    initial_map = go.Figure(data=initial_map, layout=layout_in)

    return initial_map
def test9():
    """Known cells report the expected resolutions."""
    expected = {
        '8928308280fffff': 9,
        '8a28308280f7fff': 10,
    }
    for cell, res in expected.items():
        assert h3.h3_get_resolution(cell) == res
def __getitem__(self, hex_id):
    """Return the hex with the given hex id.

    Raises:
        KeyError: if hex_id is not present at its resolution.
    """
    res = h3.h3_get_resolution(hex_id)
    try:
        # EAFP: single dict lookup instead of `in` check + second lookup.
        return self.hex_dict[res][hex_id]
    except KeyError:
        # Bug fix: raise KeyError (subclass of Exception, so existing
        # handlers still catch it) and correct the "illigal" typo.
        raise KeyError("item not in hex dict. illegal access.") from None
def __contains__(self, hex_id):
    """Determine if a given hex_id is in the hex_dict."""
    res = h3.h3_get_resolution(hex_id)
    # Idiom fix: return the membership test directly instead of the
    # `if not x in …: return False / return True` pattern.
    return hex_id in self.hex_dict[res]
def choropleth_map(df_aggreg,
                   border_color='black',
                   fill_opacity=0.7,
                   initial_map=None,
                   with_legend=False,
                   kind="linear"):
    """Create a folium choropleth map from aggregated hex data.

    :param df_aggreg: dataframe with 'hex_id' and 'value' columns
    :param border_color: hexagon border color
    :param fill_opacity: hexagon fill opacity
    :param initial_map: existing folium map to draw on; created if None
    :param with_legend: add the colormap legend (not recommended with
        multiple layers)
    :param kind: colormap kind: "linear", "outlier", or "filled_nulls"
    :return: folium map with the choropleth layer added
    :raises ValueError: if kind is not one of the supported values
    """
    # Colormap bounds.
    min_value = df_aggreg["value"].min()
    max_value = df_aggreg["value"].max()
    m = round((min_value + max_value) / 2, 0)

    # Take resolution from the first row.
    # NOTE(review): uses .loc[1], while the comment says "first row" and a
    # sibling implementation uses .loc[0] — assumes the index starts at 1;
    # TODO confirm.
    res = h3.h3_get_resolution(df_aggreg.loc[1, 'hex_id'])

    if initial_map is None:
        f = folium.Figure(width=1000, height=1000)
        initial_map = folium.Map(
            location=[40.7128, -74.0060],
            zoom_start=10,
            tiles="cartodbpositron",
            attr=
            '© <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors © <a href="http://cartodb.com/attributions#basemaps">CartoDB</a>'
        ).add_to(f)

    # The colormap. Color names accepted:
    # https://github.com/python-visualization/branca/blob/master/branca/_cnames.json
    if kind == "linear":
        custom_cm = cm.LinearColormap(['green', 'yellow', 'red'],
                                      vmin=min_value,
                                      vmax=max_value)
    elif kind == "outlier":
        # For outliers, values would be -11, 0, 1.
        custom_cm = cm.LinearColormap(['blue', 'white', 'red'],
                                      vmin=min_value,
                                      vmax=max_value)
    elif kind == "filled_nulls":
        custom_cm = cm.LinearColormap(['sienna', 'green', 'yellow', 'red'],
                                      index=[0, min_value, m, max_value],
                                      vmin=min_value,
                                      vmax=max_value)
    else:
        # Bug fix: an unknown kind previously fell through and crashed later
        # with a NameError on custom_cm; fail fast with a clear error.
        raise ValueError(
            "kind must be 'linear', 'outlier' or 'filled_nulls'")

    # Create geojson data from the dataframe.
    geojson_data = hexagons_dataframe_to_geojson(df_hex=df_aggreg)

    # Plot on the map.
    name_layer = "Choropleth " + str(res)
    if kind != "linear":
        name_layer = name_layer + kind

    folium.GeoJson(geojson_data,
                   style_function=lambda feature: {
                       'fillColor': custom_cm(feature['properties']['value']),
                       'color': border_color,
                       'weight': 1,
                       'fillOpacity': fill_opacity
                   },
                   name=name_layer).add_to(initial_map)

    # Add legend (not recommended if multiple layers).
    # Idiom fix: truthiness test instead of `== True`.
    if with_legend:
        custom_cm.add_to(initial_map)

    return initial_map
def test_h3_get_resolution():
    """geo_to_h3 round-trips the requested resolution at every level."""
    lat, lng = 37.3615593, -122.0553238
    for res in range(16):
        cell = h3.geo_to_h3(lat, lng, res)
        assert h3.h3_get_resolution(cell) == res