def nodes(network_name):
    # Browsers request /favicon.ico; short-circuit so it is not treated as a network name.
    if network_name == 'favicon.ico':
        abort(404)
    nodes = load_bz2_pickle(nodes_latest_path(network_name))
    return render_template('detail.html', nodes=list(nodes.values()), network_name=network_name)
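# Everything in this section round-trips data through load_bz2_pickle and
# save_bz2_pickle from pickle_util, which is not shown here. A minimal sketch
# of what those helpers plausibly look like, assuming .pbz2 files are
# bz2-compressed pickles (an assumption based on the extension):
import bz2
import pickle


def load_bz2_pickle(path):
    # Read and unpickle a bz2-compressed file.
    with bz2.open(path, "rb") as f:
        return pickle.load(f)


def save_bz2_pickle(data, path):
    # Pickle data and write it bz2-compressed.
    with bz2.open(path, "wb") as f:
        pickle.dump(data, f)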
def get_all_blocks():
    """
    retrieves all blocks on chain and caches when possible

    will be REALLY slow with large block downloads as calls are throttled.
    """
    cached_blocks_file = DATA_PATH / "block_cache.pbz2"
    if cached_blocks_file.exists():
        blocks = load_bz2_pickle(cached_blocks_file)
        last_height = blocks[-1]["header"]["height"]
    else:
        blocks = []
        last_height = -1
    block = get_block()["result"]["block"]
    new_blocks = []
    cur_height = block["header"]["height"]
    print(f"Downloading blocks from cur height: {cur_height} down to cached height: {last_height}.")
    for _ in range(cur_height - last_height):
        new_blocks.append(block)
        # Throttle RPC calls.
        time.sleep(0.1)
        parent_hash = block["header"]["parent_hash"]
        # The genesis block's parent hash is all zeros; stop walking there.
        if parent_hash != '0000000000000000000000000000000000000000000000000000000000000000':
            block = get_block(parent_hash)["result"]["block"]
    # Blocks were walked tip -> genesis; restore ascending order before caching.
    new_blocks.reverse()
    blocks.extend(new_blocks)
    save_bz2_pickle(blocks, cached_blocks_file)
    return blocks
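# Hypothetical usage of get_all_blocks(): warm or refresh the local cache and
# report the tip. The field names mirror the lookups in the function above.
blocks = get_all_blocks()
tip = blocks[-1]["header"]
print(f"Cached {len(blocks)} blocks; tip height {tip['height']}, era {tip['era_id']}")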
def get_all_transfers():
    """
    retrieves all transfers on chain and caches

    will be REALLY slow with large downloads as calls are throttled.

    Key "last_height" stores the height of the last block transfers have been synced up to.
    """
    cached_transfers_file = DATA_PATH / "transfer_cache.pbz2"
    if cached_transfers_file.exists():
        transfers = load_bz2_pickle(cached_transfers_file)
    else:
        transfers = {}
    cur_height = 0
    cache_height = transfers.get("last_height", 0)
    blocks = get_all_blocks()
    print(f"Downloading transfers from block height {cache_height} to {blocks[-1]['header']['height']}")
    # Block list index matches block height, so the slice skips already-synced
    # blocks; the height check below is a defensive guard in case it does not.
    for block in blocks[cache_height:]:
        cur_height = block["header"]["height"]
        if cur_height < cache_height:
            continue
        for transfer_hash in block["header"]["transfer_hashes"]:
            if transfer_hash not in transfers:
                transfers[transfer_hash] = get_deploy(transfer_hash)
    transfers["last_height"] = cur_height
    save_bz2_pickle(transfers, cached_transfers_file)
    return transfers
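# The transfer cache keeps the bookkeeping "last_height" entry in the same dict
# as the transfer data, so consumers have to filter it out. A short sketch:
transfers = get_all_transfers()
real_transfers = {h: t for h, t in transfers.items() if h != "last_height"}
print(f"{len(real_transfers)} transfers cached through height {transfers.get('last_height', 0)}")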
def get_all_deploys():
    """
    retrieves all deploys on chain and caches

    will be REALLY slow with large downloads as calls are throttled.

    Key "last_height" stores the height of the last block deploys have been synced up to.
    """
    ANNOUNCE_INTERVAL = 100
    cached_deploys_file = DATA_PATH / "deploy_cache.pbz2"
    if cached_deploys_file.exists():
        deploys = load_bz2_pickle(cached_deploys_file)
    else:
        deploys = {}
    cur_height = 0
    cache_height = deploys.get("last_height", 0)
    blocks = get_all_blocks()
    print(f"Downloading deploys from block height {cache_height} to {blocks[-1]['header']['height']}")
    announce_height = cache_height + ANNOUNCE_INTERVAL
    for block in blocks[cache_height:]:
        cur_height = block["header"]["height"]
        if cur_height < cache_height:
            continue
        for deploy_hash in block["header"]["deploy_hashes"]:
            if deploy_hash not in deploys:
                deploys[deploy_hash] = get_deploy(deploy_hash)
        # Print progress every ANNOUNCE_INTERVAL blocks.
        if cur_height == announce_height:
            print(f"At block {announce_height}")
            announce_height += ANNOUNCE_INTERVAL
    deploys["last_height"] = cur_height
    save_bz2_pickle(deploys, cached_deploys_file)
    return deploys
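# The manual ANNOUNCE_INTERVAL bookkeeping above could be swapped for a
# progress bar if tqdm happens to be installed (an assumption; it is not shown
# as a dependency anywhere in this code):
#
#     from tqdm import tqdm
#     for block in tqdm(blocks[cache_height:], desc="deploys"):
#         ...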
def load_ips():
    try:
        if IPS_FILE.exists():
            return load_bz2_pickle(IPS_FILE)
        return INTERNAL_NODES
    except Exception:
        # Should overwrite bad file after generating new ip list
        return INTERNAL_NODES
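# The except branch above says the bad file should be overwritten after a new
# ip list is generated. A hypothetical companion writer (save_ips does not
# appear in the original code):
def save_ips(ips):
    # Overwrites IPS_FILE, replacing any corrupt cache load_ips() fell back from.
    save_bz2_pickle(ips, IPS_FILE)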
def geo_data_for_ip(ip) -> dict:
    geo_data = load_bz2_pickle(GEO_DATA) if GEO_DATA.exists() else {}
    if ip not in geo_data:
        geo_response = requests.get(f"http://api.ipstack.com/{ip}?access_key={ACCESS_KEY}")
        if geo_response.status_code == 200:
            geo_data[ip] = geo_response.json()
            save_bz2_pickle(geo_data, GEO_DATA)
    # Avoid a KeyError when the lookup failed and the ip was never cached.
    return geo_data.get(ip, {})
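# Hypothetical usage of geo_data_for_ip(). ipstack responses include fields
# such as latitude, longitude, and country_name; using .get() keeps this safe
# when the lookup failed and an empty dict came back.
geo = geo_data_for_ip("8.8.8.8")
print(geo.get("country_name"), geo.get("latitude"), geo.get("longitude"))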
def network_info(network_name):
    # need to replace with template.
    net_info = load_bz2_pickle(network_info_path(network_name))
    nodes = load_bz2_pickle(nodes_latest_path(network_name))
    valid_ver = defaultdict(int)
    weight_pct = defaultdict(int)
    all_ver = defaultdict(int)
    node_count = 0
    val_count = 0
    for node in nodes.values():
        node_count += 1
        upgrade = "None"
        if node["next_upgrade"]:
            upgrade = node["next_upgrade"]["activation_point"]
        version = f"{node['api_version']} - Upgrade: {upgrade}"
        if node.get("is_validator"):
            weight = node.get("weight_percent", 0)
            weight_pct[version] += weight
            valid_ver[version] += 1
            val_count += 1
        all_ver[version] += 1
    versions = []
    for key, val in all_ver.items():
        versions.append({
            "version": key,
            "all_node_pct": round(val / node_count * 100, 2),
            # Guard against division by zero when no validators were detected.
            "val_node_pct": round(valid_ver.get(key, 0) / val_count * 100, 2) if val_count else 0,
            "val_wgt_pct": round(weight_pct.get(key, 0), 2)
        })
    return render_template('summary.html',
                           peer_counts=net_info["peer_count"],
                           path_counts=net_info["path_count"],
                           versions=sorted(versions, key=lambda d: d["version"], reverse=True))
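# Shape of one entry in the versions list handed to summary.html, with made-up
# numbers for illustration:
# {
#     "version": "1.4.5 - Upgrade: None",
#     "all_node_pct": 92.31,   # share of all reachable nodes on this version
#     "val_node_pct": 95.0,    # share of validators on this version
#     "val_wgt_pct": 97.42,    # share of total validator weight on this version
# }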
def dump_d10():
    delta_10_nodes = data_path / "nodes-delta-10"
    d10_validate_ip = data_path / "validate_ip.pbz2"
    vip = load_bz2_pickle(d10_validate_ip)
    # Invert key -> ip into ip -> key for lookups by address.
    rvip = {ip: key for key, ip in vip.items()}
    with open(data_path / "delta_10_participation.dat", "w+") as f:
        for filename in delta_10_nodes.glob("*.pbz2"):
            timestamp = int(filename.name.split(".pbz2")[0].split("nodes_")[-1])
            print(timestamp)
            data = load_bz2_pickle(filename)
            for ip, status in data.items():
                key = rvip.get(ip, "")
                chainspec = status["chainspec_name"]
                build_version = status["build_version"]
                next_upgrade = status["next_upgrade"]
                if next_upgrade:
                    activation_point = int(next_upgrade["activation_point"])
                    protocol_version = next_upgrade["protocol_version"]
                else:
                    activation_point = ""
                    protocol_version = ""
                api_version = status["api_version"]
                peer_count = len(status["peers"])
                labi = status["last_added_block_info"]
                if labi:
                    era_id = int(labi["era_id"])
                    height = int(labi["height"])
                else:
                    era_id = ""
                    height = ""
                wdata = [timestamp, ip, key, chainspec, build_version, api_version,
                         peer_count, era_id, height, activation_point, protocol_version]
                f.write("|".join([str(d) for d in wdata]) + "\n")
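# The dump above is pipe-delimited with no header row. A sketch for loading it
# back for analysis, assuming pandas is available (not a dependency shown in
# this code); the column names follow the wdata order above.
import pandas as pd

columns = ["timestamp", "ip", "key", "chainspec", "build_version", "api_version",
           "peer_count", "era_id", "height", "activation_point", "protocol_version"]
df = pd.read_csv(data_path / "delta_10_participation.dat", sep="|", names=columns)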
def get_all_era_info():
    cached_era_info_file = DATA_PATH / "era_info.pbz2"
    if cached_era_info_file.exists():
        era_info = load_bz2_pickle(cached_era_info_file)
        last_era = max(era_info.keys())
    else:
        era_info = {}
        last_era = -1
    blocks = get_all_blocks()
    print(f"Downloading era data from {last_era} to {blocks[-1]['header']['era_id']}")
    last_block_hash = blocks[0]["hash"]
    for block in blocks:
        cur_era = block["header"]["era_id"]
        if last_era < cur_era:
            last_era = cur_era
            # The previous block is the switch block that closed the prior era.
            era_info_by_switch = get_era_info_by_switch_block(last_block_hash)
            era_info[cur_era] = era_info_by_switch["result"]["era_summary"]
        last_block_hash = block["hash"]
    save_bz2_pickle(era_info, cached_era_info_file)
    return era_info
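# Hypothetical usage of get_all_era_info(): report the cached era range. Keys
# are era ids, so no bookkeeping entry needs filtering here.
era_info = get_all_era_info()
eras = sorted(era_info)
print(f"Era summaries cached for eras {eras[0]} through {eras[-1]}")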
def verify_key(key):
    VALIDATION_FILE = DATA_FOLDER / "validate_ip.pbz2"
    ip_dict = load_bz2_pickle(VALIDATION_FILE)
    if key in ip_dict:
        return "IP registered."
    return "IP not registered."
from pickle_util import load_bz2_pickle
from pathlib import Path
from collections import defaultdict

SCRIPT_DIR = Path(__file__).parent.absolute()
NODES_PATH = SCRIPT_DIR / "data_cnm" / "nodes"
IP_UPTIME_PATH = SCRIPT_DIR / "data_cnm" / "ip_uptime.csv"
VALIDATE_IP_PATH = SCRIPT_DIR / "data_cnm" / "validate_ip.pbz2"

vip = load_bz2_pickle(VALIDATE_IP_PATH)
ip_key = {ip: key for key, ip in vip.items()}

ips = defaultdict(int)
check_count = 0


def get_height(status):
    last_added = status.get("last_added_block_info", None)
    if last_added is None:
        return 0
    return last_added.get("height", 0)


files = sorted(NODES_PATH.iterdir())
for file in files:
    print(file)
    height = set()
    check_count += 1
    obj = load_bz2_pickle(file)
    for _, status in obj.items():
        height.add(get_height(status))
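    # The original snippet ends here, with ips and ip_key still unused. A
    # hedged sketch of how the loop body might continue (assumption: an ip
    # earns an uptime tick when its reported height is near the snapshot's
    # maximum):
    max_height = max(height) if height else 0
    for ip, status in obj.items():
        if get_height(status) >= max_height - 2:  # tolerance of 2 blocks (assumed)
            ips[ip] += 1

# After the loop, per-ip uptime percentages could be written to IP_UPTIME_PATH
# (hypothetical format, pipe-delimited to match the other dumps in this code):
with open(IP_UPTIME_PATH, "w") as f:
    f.write("ip|key|uptime_pct\n")
    for ip, count in ips.items():
        f.write(f"{ip}|{ip_key.get(ip, '')}|{round(count / check_count * 100, 2)}\n")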