def __init__(self, chain_vars):
    I = Interactive()
    self.hotspots = []
    self.h_by_addr = dict()
    # keep only hotspots that pass the interactivity filter
    for h in load_hotspots():
        if I.is_interactive(h['address']):
            self.hotspots.append(h)
            self.h_by_addr[h['address']] = h
    self.chain_vars = chain_vars
def __init__(self, force=False):
    """
    Interface for easily finding hotspots

    :param force: force a reload of the hotspot data
    """
    self.hotspots = load_hotspots(force)
    self.hspot_by_addr = dict()
    self.hspot_by_name = dict()
    # note: name collisions already exist, so name lookups are
    # last-one-wins -- use at your own risk
    for h in self.hotspots:
        self.hspot_by_addr[h['address']] = h
        self.hspot_by_name[h['name'].lower()] = h
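# A minimal usage sketch for the interface above. It assumes this __init__
# belongs to the Hotspots class used by the CLI at the bottom of this
# section, and that a module path like `hotspots` exposes it -- both of
# those names are assumptions, not part of the original code.
from hotspots import Hotspots  # hypothetical module path

H = Hotspots()
h = H.hspot_by_name.get('some-hotspot-name')  # keys are lowercased names
if h is not None:
    print(h['address'], h['lat'], h['lng'])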
def main():
    with open('chain_vars.json', 'r') as fd:
        chain_vars = json.load(fd)

    # for now, whitelist every res-0 hex that contains a hotspot
    whitelist_hexs = set()
    for h in load_hotspots():
        if h['location']:
            whitelist_hexs.add(h3.h3_to_parent(h['location'], 0))

    RS = RewardScale(chain_vars)
    hex_densities, all_hex_info = RS.get_hex_densities()

    with open(f'hexDensities_RewardScale_R{chain_vars["R"]}.csv', 'w',
              newline='') as csvfile:
        hex_writer = csv.writer(csvfile, delimiter=',', quotechar='"',
                                quoting=csv.QUOTE_MINIMAL)
        hex_writer.writerow(
            ['hex', 'resolution', 'limit', 'clipped', 'child_sum', 'ratio'])
        for h in hex_densities:
            res = h3.h3_get_resolution(h)
            # renamed from `sum` to avoid shadowing the builtin
            child_sum = all_hex_info[h]['unclipped']
            ratio = 0
            if child_sum:
                ratio = hex_densities[h] / child_sum
            hex_writer.writerow([h, res, all_hex_info[h]['limit'],
                                 hex_densities[h], child_sum, ratio])

    target_hex_unclipped = dict()
    for h in all_hex_info:
        target_hex_unclipped[h] = all_hex_info[h]['unclipped']

    hotspot_scales = RS.get_reward_scale(hex_densities, target_hex_unclipped,
                                         whitelist_hexs=whitelist_hexs,
                                         normalize=True)
    total_scale = sum(hotspot_scales.values())

    with open(f'hotspot_RewardScale_R{chain_vars["R"]}.csv', 'w',
              newline='') as csvfile:
        hex_writer = csv.writer(csvfile, delimiter=',', quotechar='"',
                                quoting=csv.QUOTE_MINIMAL)
        hex_writer.writerow(['address', 'reward_scale'])
        for h in hotspot_scales:
            hex_writer.writerow([h, hotspot_scales[h]])
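# A quick sanity check on the hex-density CSV written above: ratio < 1 means
# the hex's density was clipped relative to the sum of its children. This is
# a sketch only; the filename assumes R=8 in chain_vars.
import csv

def clipped_hexes(fname='hexDensities_RewardScale_R8.csv'):
    clipped = []
    with open(fname, newline='') as f:
        for row in csv.DictReader(f):
            ratio = float(row['ratio'])
            if 0 < ratio < 1.0:
                clipped.append((row['hex'], row['resolution'], ratio))
    return clipped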
def __init__(self):
    hotspots = load_hotspots()
    self.h_by_addr = dict()
    for h in hotspots:
        self.h_by_addr[h['address']] = h

    interactives = set()
    found_witness = False
    # each witness-file row is an edge: transmitter address, witness address
    for fname in ('witnesses.csv', 'witnesses2.csv'):
        try:
            with open(fname, newline='') as csvfile:
                reader = csv.reader(csvfile, delimiter=',', quotechar='"')
                for row in reader:
                    tx = self.h_by_addr[row[0].strip()]
                    wit = self.h_by_addr[row[1].strip()]
                    dist = haversine_km(tx['lat'], tx['lng'],
                                        wit['lat'], wit['lng'])
                    # skip witness pairs closer than 300 m
                    if dist < 0.3:
                        continue
                    interactives.add(row[0].strip())
                    # interactives.add(row[1].strip())
                    found_witness = True
        except FileNotFoundError:
            pass

    if not found_witness:
        print("WARNING: no 'witnesses.csv' found with addresses of"
              " interactive hotspots; assuming all hotspots are interactive")
        print("\tthis will run so you can follow the algorithm,"
              " but it will give very wrong results")
        interactives = [h['address'] for h in hotspots]
    self.interactives = interactives
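# The RewardScale constructor earlier calls I.is_interactive(), which is not
# shown in this section. Since __init__ above collects the qualifying
# addresses into self.interactives, a minimal sketch of the method is just a
# membership test (assumed implementation, not the original):
def is_interactive(self, address):
    # True when this hotspot witnessed at least one transmission from >300 m
    # away (or unconditionally, when no witness file was found -- see above)
    return address in self.interactives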
def __init__(self):
    hs = load_hotspots()
    self.h_by_addr = dict()
    for h in hs:
        self.h_by_addr[h['address']] = h

    # key: hotspot address
    # value: dict keyed by witness address with distance as the value;
    #        only witnesses farther than 300 m will appear
    self.witnesses = dict()
    # try:
    #     self.load_wit_file('witnesses.csv')
    # except FileNotFoundError:
    #     pass
    try:
        self.load_wit_file('witnesses2.csv')
    except FileNotFoundError:
        pass
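# Both classes above rely on helpers that are elided in this section:
# haversine_km() and Witnesses.load_wit_file(). Sketches follow; the
# haversine formula is standard (the 0.3 cutoffs imply km units), and the
# load_wit_file body is an assumption reconstructed from the self.witnesses
# layout documented above.
import csv
import math

def haversine_km(lat1, lng1, lat2, lng2):
    """Great-circle distance between two (lat, lng) points in kilometers."""
    r = 6371.0  # mean Earth radius, km
    p1, p2 = math.radians(lat1), math.radians(lat2)
    dphi = math.radians(lat2 - lat1)
    dlmb = math.radians(lng2 - lng1)
    a = math.sin(dphi / 2)**2 + math.cos(p1) * math.cos(p2) * math.sin(dlmb / 2)**2
    return 2 * r * math.asin(math.sqrt(a))

def load_wit_file(self, fname):
    # each row: transmitter address, witness address
    with open(fname, newline='') as csvfile:
        for row in csv.reader(csvfile, delimiter=',', quotechar='"'):
            tx, wit = row[0].strip(), row[1].strip()
            d = haversine_km(self.h_by_addr[tx]['lat'], self.h_by_addr[tx]['lng'],
                             self.h_by_addr[wit]['lat'], self.h_by_addr[wit]['lng'])
            if d > 0.3:  # keep only witnesses farther than 300 m
                self.witnesses.setdefault(tx, dict())[wit] = d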
def main():
    hs = utils.load_hotspots()
    sample_neighbor(hotspots=hs, density_tgt=1, density_max=4, R=8, N=2)
def map_reward_file(filename='real_rewards.csv', bbox=[0, 0, 0, 0]):
    hs = []
    h_by_addr = dict()
    for h in load_hotspots():
        hs.append(h)
        h_by_addr[h['address']] = h

    lat = (bbox[0] + bbox[2]) / 2
    lng = (bbox[1] + bbox[3]) / 2
    my_map = folium.Map(location=[lat, lng], zoom_start=6)

    # diverging colormap centered on the middle decile bucket
    cnorm = mpl.colors.TwoSlopeNorm(vcenter=5, vmin=0, vmax=9)
    scalemap = cm.ScalarMappable(norm=cnorm, cmap='RdYlGn')

    with open(filename, 'r', newline='') as csvfile:
        reward_reader = csv.reader(csvfile, delimiter=',', quotechar='"',
                                   quoting=csv.QUOTE_MINIMAL)
        next(reward_reader)  # skip header
        rewards = []
        for row in reward_reader:
            h = row[0]
            # check membership before indexing to avoid a KeyError
            if h not in h_by_addr:
                continue
            hlat = h_by_addr[h]['lat']
            hlng = h_by_addr[h]['lng']
            if not (bbox[2] < hlat < bbox[0] and bbox[1] < hlng < bbox[3]):
                continue
            rewards.append([row[0], float(row[-1])])

    rw = np.array([r[-1] for r in rewards])
    pct = [.1, .2, .3, .4, .5, .6, .7, .8, .9]
    qs = np.quantile(rw[rw > 0], pct)
    for i in range(len(qs)):
        print(f"{pct[i]*100}% cutoff at {qs[i]:.3f}")

    for r in rewards:
        h = r[0]
        hlat = h_by_addr[h]['lat']
        hlng = h_by_addr[h]['lng']
        if not (bbox[2] < hlat < bbox[0] and bbox[1] < hlng < bbox[3]):
            continue
        # bucket the reward into its decile and color accordingly
        idx = np.searchsorted(qs, [r[-1]])[0]
        color_body = "#" + ''.join(
            f"{x:02x}" for x in scalemap.to_rgba(idx, bytes=True)[:3])
        if idx == 0:
            qstr = f"<{pct[0]*100:.0f}"
        elif idx == len(qs):
            qstr = f">{pct[-1]*100:.0f}"
        else:
            qstr = f"{pct[idx-1]*100:.0f}-{pct[idx]*100:.0f}"
        folium.CircleMarker(
            (hlat, hlng),
            color=color_body,
            fill_color=color_body,
            popup=f"<nobr>{h_by_addr[h]['name']}</nobr><br>"
                  f"rew. units:{r[-1]:.2f}<br>"
                  f"<nobr>rew. pct:{qstr}%</nobr>",
            fill=True,
            fill_opacity=0.55,
            number_of_sides=8,
            radius=11,
            opacity=1,
            weight=3).add_to(my_map)
    my_map.save('para1_realrewards.html')
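# How the decile coloring above works: np.searchsorted against the nine
# quantile cutoffs maps each reward to a bucket index 0..9, which drives both
# the marker color and the "rew. pct" label. A tiny self-contained check:
import numpy as np

rw = np.array([0.2, 1.0, 3.0, 5.5, 9.9])
qs = np.quantile(rw, [.1, .2, .3, .4, .5, .6, .7, .8, .9])
print(np.searchsorted(qs, rw))  # 0 = below the 10% cutoff, 9 = above the 90%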
def map_hotspot_rewards(outputfile='expected_rewards.html', bbox=None):
    """
    :param outputfile: html file the map is written to
    :param bbox: bounding box as [lat upper left, lng upper left,
        lat lower right, lng lower right]
    :return:
    """
    hs = []
    h_by_addr = dict()
    for h in load_hotspots():
        hs.append(h)
        h_by_addr[h['address']] = h
    lat = (bbox[0] + bbox[2]) / 2
    lng = (bbox[1] + bbox[3]) / 2

    Wits = Witnesses()
    #Wits.output_witness_edges('sim_witness_edges.csv')
    #hspots = get_hotspot_scales(fn='hotspot_RewardScale_R9.csv')

    # ignore hex scaling, just simulate beaconing
    hspots = [dict(addr=h['address'], odds=1.0) for h in hs]

    rewards = dict()
    # simulate transmissions from every hotspot and accumulate the rewards
    # earned by each witness
    for h in hspots:
        haddr = h['addr']
        rew = Wits.simulate_transmit(txaddr=haddr, num_txs=500,
                                     scale=h['odds'], normalize=True)
        if h_by_addr[haddr]['name'] == 'recumbent-magenta-aphid':
            # inner loop renamed from h to w to avoid shadowing
            for w in rew:
                print(f"  neighbor: {w}, reward={rew[w]:.3f} w/ scale")
        for r in rew:
            rewards.setdefault(r, 0)
            rewards[r] += rew[r]
            if r in h_by_addr and h_by_addr[r]['name'] == 'faint-pecan-trout':
                print(f"faint earned: {rew[r]:.4f}, at: {rewards[r]}")

    rw_sum = 0
    rw_max = 0
    max_h = None
    cnt = 0
    for h in rewards:
        x = rewards[h]
        if x == 0:
            continue
        if x > rw_max:
            rw_max = x
            max_h = h
        rw_sum += x
        cnt += 1
    rw_avg = rw_sum / cnt
    print(f"max earner {h_by_addr[max_h]['name']}, earned {rw_max:.3f}"
          f" with {len(Wits.get_witnesses(max_h))} witnesses")
    print(f"max at {h_by_addr[max_h]['geocode']}")

    plt.figure()
    rw = np.array(list(rewards.values()))
    pct = [.1, .2, .3, .4, .5, .6, .7, .8, .9]
    qs = np.quantile(rw[rw > 0], pct)
    for i in range(len(qs)):
        print(f"{pct[i]*100}% cutoff at {qs[i]:.3f}")
    plt.hist(rw[rw > 0], bins=50)
    plt.xlabel("simulated reward units")
    plt.ylabel("hotspot count")
    plt.title("Reward distribution for HIP15")
    plt.grid()
    plt.show()

    make_map = True
    if make_map:
        my_map = folium.Map(location=[lat, lng], zoom_start=6)
        cnorm = mpl.colors.TwoSlopeNorm(vcenter=5, vmin=0, vmax=9)
        scalemap = cm.ScalarMappable(norm=cnorm, cmap='RdYlGn')

    scale_dict = dict()
    for h in hspots:
        scale_dict[h['addr']] = h['odds']

    idc = np.argsort(list(rewards.values()))
    hs = np.array(list(rewards.keys()))
    with open('simulated_rewards_bcn.csv', 'w', newline='') as csvfile:
        reward_writer = csv.writer(csvfile, delimiter=',', quotechar='"',
                                   quoting=csv.QUOTE_MINIMAL)
        reward_writer.writerow(
            ['address', 'h3', 'lat', 'lng', 'tx_rw_scale', 'E_reward'])
        for h in hs[idc]:
            if h not in h_by_addr:
                continue
            if rewards[h] == 0:
                continue
            hlat = h_by_addr[h]['lat']
            hlng = h_by_addr[h]['lng']
            if not (bbox[2] < hlat < bbox[0] and bbox[1] < hlng < bbox[3]):
                continue
            if h not in scale_dict:
                continue
            reward_writer.writerow([
                h, h_by_addr[h]['location'],
                f"{h_by_addr[h]['lat']:.5f}", f"{h_by_addr[h]['lng']:.5f}",
                f"{scale_dict[h]:.4f}", f"{rewards[h]:.3f}"
            ])
            if make_map:
                idx = np.searchsorted(qs, [rewards[h]])[0]
                color_body = "#" + ''.join(
                    f"{x:02x}" for x in scalemap.to_rgba(idx, bytes=True)[:3])
                if idx == 0:
                    qstr = f"<{pct[0]*100:.0f}"
                elif idx == len(qs):
                    qstr = f">{pct[-1]*100:.0f}"
                else:
                    qstr = f"{pct[idx-1]*100:.0f}-{pct[idx]*100:.0f}"
                folium.CircleMarker(
                    (hlat, hlng),
                    color=color_body,
                    fill_color=color_body,
                    popup=f"<nobr>{h_by_addr[h]['name']}</nobr><br>"
                          f"rew. units:{rewards[h]:.2f}<br>"
                          f"<nobr>rew. pct:{qstr}%</nobr><br>"
                          f"#wits:{len(Wits.get_witnesses(h))}",
                    fill=True,
                    fill_opacity=0.55,
                    number_of_sides=8,
                    radius=11,
                    opacity=1,
                    weight=3).add_to(my_map)
    if make_map:
        my_map.save(outputfile)
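# Example invocation; the bbox numbers are made up and only illustrate the
# [lat upper left, lng upper left, lat lower right, lng lower right]
# convention the function expects:
if __name__ == '__main__':
    map_hotspot_rewards(outputfile='expected_rewards.html',
                        bbox=[41.2, -74.7, 40.3, -73.2])  # roughly greater NYC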
def plot_hotspot_probs(hprobs, lat=None, lng=None, R=8, geo_range=1.0,
                       outputfile='hotspot_probs.html', bbox=None):
    """
    :param hprobs: list of hotspot probabilities
    :param lat: map center latitude
    :param lng: map center longitude
    :param R: target hex resolution
    :param geo_range: half-width in degrees of the default bbox around (lat, lng)
    :param outputfile: html file the map is written to
    :param bbox: bounding box as [lat upper left, lng upper left,
        lat lower right, lng lower right]
    :return:
    """
    hs = load_hotspots()
    h_by_addr = dict()
    for h in hs:
        h_by_addr[h['address']] = h

    if not bbox:
        bbox = [lat + geo_range, lng - geo_range,
                lat - geo_range, lng + geo_range]
    else:
        lat = (bbox[0] + bbox[2]) / 2
        lng = (bbox[1] + bbox[3]) / 2

    my_map = folium.Map(location=[lat, lng], zoom_start=6)
    vals = [x['odds'] for x in hprobs]
    cnorm = mpl.colors.TwoSlopeNorm(vcenter=1.0, vmin=np.min(vals),
                                    vmax=2)  # alternatively np.max(vals)
    colmap = cm.ScalarMappable(norm=cnorm, cmap='RdYlGn')
    idc = np.argsort(vals)

    hexs = set()         # target-resolution hexes to draw
    hex_parent = set()   # their parents (R - 1)
    hex_gparent = set()  # their grandparents (R - 2)

    for idx in idc[::-1]:
        hp = hprobs[idx]
        # jitter positions slightly so stacked hotspots stay visible
        hlat = h_by_addr[hp['addr']]['lat'] + (random.random() - 0.5) * .0004
        hlng = h_by_addr[hp['addr']]['lng'] + (random.random() - 0.5) * .0004
        if not (bbox[2] < hlat < bbox[0] and bbox[1] < hlng < bbox[3]):
            continue
        color = "#" + ''.join(
            f"{x:02x}" for x in colmap.to_rgba(hp['odds'], bytes=True)[:3])
        folium.CircleMarker(
            (hlat, hlng),
            color='black',
            fill_color=color,
            popup=f"{h_by_addr[hp['addr']]['name']} [{hp['odds']:.2f}]",
            fill=True,
            fill_opacity=0.9,
            number_of_sides=8,
            radius=11,
            opacity=.35,
            weight=2,
            z_index_offset=2 - int(hp['odds'])).add_to(my_map)
        loc = h_by_addr[hp['addr']]['location']
        hexs.add(h3.h3_to_parent(loc, R))
        hex_parent.add(h3.h3_to_parent(loc, R - 1))
        hex_gparent.add(h3.h3_to_parent(loc, R - 2))

    print(f"drawing {len(hexs)} target hexs")
    for hx in hexs:
        hex_points = list(h3.h3_to_geo_boundary(hx, False))
        hex_points.append(hex_points[0])  # close the outline
        folium.PolyLine(hex_points, weight=1.5, color='black',
                        opacity=.45).add_to(my_map)
    print(f"drawing {len(hex_parent)} parent hexs")
    for hx in hex_parent:
        hex_points = list(h3.h3_to_geo_boundary(hx, False))
        hex_points.append(hex_points[0])
        folium.PolyLine(hex_points, weight=1.5, opacity=.65).add_to(my_map)
    print(f"drawing {len(hex_gparent)} grandparent hexs")
    for hx in hex_gparent:
        hex_points = list(h3.h3_to_geo_boundary(hx, False))
        hex_points.append(hex_points[0])
        folium.PolyLine(hex_points, weight=1.5, color='white',
                        opacity=.65).add_to(my_map)
    my_map.save(outputfile)
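# plot_hotspot_probs() expects a list of dicts with 'addr' and 'odds' keys,
# the same shape the beaconing simulation above builds. A sketch that plots
# hotspots with asserted locations at randomized odds (the center point and
# the odds range are arbitrary choices for illustration):
import random

hprobs = [dict(addr=h['address'], odds=random.uniform(0.5, 1.5))
          for h in load_hotspots() if h['location']]
plot_hotspot_probs(hprobs, lat=40.7, lng=-74.0, R=8, geo_range=0.5,
                   outputfile='hotspot_probs.html')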
def map_hotspot_scale(hscale, lat=None, lng=None, R=8, geo_range=1.0,
                      outputfile='hotspot_probs.html', bbox=None):
    """
    :param hscale: list of dicts with hotspot 'addr' and reward scale 'odds'
    :param lat: map center latitude
    :param lng: map center longitude
    :param R: target hex resolution
    :param geo_range: half-width in degrees of the default bbox around (lat, lng)
    :param outputfile: html file the map is written to
    :param bbox: bounding box as [lat upper left, lng upper left,
        lat lower right, lng lower right]
    :return:
    """
    hs = load_hotspots()
    h_by_addr = dict()
    for h in hs:
        h_by_addr[h['address']] = h

    if not bbox:
        bbox = [lat + geo_range, lng - geo_range,
                lat - geo_range, lng + geo_range]
    else:
        lat = (bbox[0] + bbox[2]) / 2
        lng = (bbox[1] + bbox[3]) / 2

    my_map = folium.Map(location=[lat, lng], zoom_start=6)

    vals = [x['odds'] for x in hscale]
    avg = np.mean(vals)
    print(f"max scaling factor = {np.max(vals)}")
    print(f"average scaling factor = {avg}")
    # diverging colormap centered on the average scale
    cnorm = mpl.colors.TwoSlopeNorm(vcenter=avg, vmin=np.min(vals),
                                    vmax=np.max(vals) * 1.2)
    colmap = cm.ScalarMappable(norm=cnorm, cmap='RdYlGn')
    idc = np.argsort(vals)

    hexs = set()          # target-resolution hexes to draw
    hex_parent = set()    # parents (R - 1)
    hex_gparent = set()   # grandparents (R - 2)
    hex_ggparent = set()  # great-grandparents (R - 3)

    for idx in idc[::-1]:
        hp = hscale[idx]
        hlat = h_by_addr[hp['addr']]['lat']
        hlng = h_by_addr[hp['addr']]['lng']
        if not (bbox[2] < hlat < bbox[0] and bbox[1] < hlng < bbox[3]):
            continue
        color = "#" + ''.join(
            f"{x:02x}" for x in colmap.to_rgba(hp['odds'], bytes=True)[:3])
        folium.CircleMarker(
            (hlat, hlng),
            color='black',
            fill_color=color,
            popup=f"{h_by_addr[hp['addr']]['name']} [{hp['odds']:.2f}]",
            fill=True,
            fill_opacity=0.9,
            number_of_sides=8,
            radius=11,
            opacity=.35,
            weight=2,
            z_index_offset=2 - int(hp['odds'])).add_to(my_map)
        loc = h_by_addr[hp['addr']]['location']
        hexs.add(h3.h3_to_parent(loc, R))
        hex_parent.add(h3.h3_to_parent(loc, R - 1))
        hex_gparent.add(h3.h3_to_parent(loc, R - 2))
        hex_ggparent.add(h3.h3_to_parent(loc, R - 3))

    print(f"drawing {len(hexs)} target hexs")
    for hx in hexs:
        hex_points = list(h3.h3_to_geo_boundary(hx, False))
        hex_points.append(hex_points[0])  # close the outline
        folium.PolyLine(hex_points, weight=1.5, color='black',
                        opacity=.45).add_to(my_map)
    print(f"drawing {len(hex_parent)} parent hexs")
    for hx in hex_parent:
        hex_points = list(h3.h3_to_geo_boundary(hx, False))
        hex_points.append(hex_points[0])
        folium.PolyLine(hex_points, weight=1.5, opacity=.65).add_to(my_map)
    print(f"drawing {len(hex_gparent)} grandparent hexs")
    for hx in hex_gparent:
        hex_points = list(h3.h3_to_geo_boundary(hx, False))
        hex_points.append(hex_points[0])
        folium.PolyLine(hex_points, weight=1.5, color='white',
                        opacity=.65).add_to(my_map)
    print(f"drawing {len(hex_ggparent)} great grandparent hexs")
    for hx in hex_ggparent:
        hex_points = list(h3.h3_to_geo_boundary(hx, False))
        hex_points.append(hex_points[0])
        folium.PolyLine(hex_points, weight=1.5, color='pink',
                        opacity=.65).add_to(my_map)
    my_map.save(outputfile)
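# The beaconing simulation above references get_hotspot_scales(), which reads
# the per-hotspot scales written by the reward-scale main() back into the
# list-of-dicts shape map_hotspot_scale() consumes. A sketch consistent with
# that CSV's columns (address, reward_scale); the body is an assumption:
import csv

def get_hotspot_scales(fn='hotspot_RewardScale_R9.csv'):
    with open(fn, newline='') as f:
        return [dict(addr=row['address'], odds=float(row['reward_scale']))
                for row in csv.DictReader(f)]

# e.g. map_hotspot_scale(get_hotspot_scales(), bbox=[41.2, -74.7, 40.3, -73.2])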
parser.add_argument('-n', '--name',
                    help='hotspot name to analyze with dashes-between-words')
parser.add_argument('-f', '--file', help='data file(s) for tax processing')
parser.add_argument('-y', '--year', help='filter to a given tax year')
args = parser.parse_args()

H = Hotspots()
hotspots = []
if args.name:
    names = args.name.split(',')
    for name in names:
        hotspot = H.get_hotspot_by_name(name)
        if hotspot is None:
            raise ValueError(
                f"could not find hotspot named '{name}'; use dashes between words")
        hotspots.append(hotspot)

year = -1
if args.year:
    year = int(args.year)
    print(f"running for tax year: {year}")

if args.x == 'refresh_hotspots':
    load_hotspots(True)
if args.x == 'hnt_rewards':
    load_hnt_rewards(hotspots)
if args.x == 'tax_lots':
    load_tax_lots(hotspots, year)
if args.x == 'parse_trades':
    parse_trades(args.file)
if args.x == 'schedule_d':
    process_trades(hotspots, args.file, year)
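# The parser setup above is truncated and the '-x' command argument it
# dispatches on is not shown. A sketch consistent with the five commands
# handled above (the flag spelling and help text are assumptions); it would
# sit before parser.parse_args():
parser.add_argument('-x',
                    choices=['refresh_hotspots', 'hnt_rewards', 'tax_lots',
                             'parse_trades', 'schedule_d'],
                    help='action to run')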