def estimate_gps_for_all_sizes(window):
    """Estimate pool-wide graph rates (GPS) for every solution size seen
    in the share window.

    Returns a list of (edge_bits, gps) tuples, or an empty list when the
    window is too small to span a measurable time interval.
    """
    print("estimate_gps_for_all_sizes across all workers")
    sys.stdout.flush()
    # Need at least two records to have a non-zero time span
    if len(window) < 2:
        return []
    first_height = window[0].height
    last_height = window[-1].height
    # Sanity-check that both boundary grin blocks exist in the DB
    assert Blocks.get_by_height(first_height) is not None, \
        "Missing grin block at height: {}".format(first_height)
    assert Blocks.get_by_height(last_height) is not None, \
        "Missing grin block at height: {}".format(last_height)
    # Tally valid share counts per edge_bits size across the whole window
    counts_by_size = {}
    for shares_rec in window:
        for share_detail in shares_rec.shares:
            running = counts_by_size.get(share_detail.edge_bits, 0)
            counts_by_size[share_detail.edge_bits] = running + share_detail.valid
    print("Valid Share Counts entire window:")
    pp.pprint(counts_by_size)
    # Convert each size's valid-share count into an estimated graph rate
    all_gps = [
        (size, lib.calculate_graph_rate(window[0].timestamp,
                                        window[-1].timestamp,
                                        count, size, last_height))
        for size, count in counts_by_size.items()
    ]
    sys.stdout.flush()
    return all_gps
def estimate_gps_for_all_sizes(user_id, window):
    """Estimate one worker's graph rates (GPS) for every solution size it
    submitted during the share window.

    Args:
        user_id: id of the worker whose shares are measured.
        window:  ordered list of Worker_shares records (oldest first) for
                 all workers; filtered to this user_id internally.

    Returns:
        List of (edge_bits, gps) tuples; empty when the window cannot
        span a time interval.
    """
    print("estimate_gps_for_all_sizes: user_id: {}".format(user_id))
    # ROBUSTNESS: need at least two records to measure a rate; also
    # prevents an IndexError on an empty window.  Matches the guard in
    # the pool-wide estimator.
    if len(window) < 2:
        return []
    first_height = window[0].height
    last_height = window[-1].height
    # Sanity-check that both boundary grin blocks exist in the DB
    first_grin_block = Blocks.get_by_height(first_height)
    last_grin_block = Blocks.get_by_height(last_height)
    assert first_grin_block is not None, "Missing grin block at height: {}".format(first_height)
    assert last_grin_block is not None, "Missing grin block at height: {}".format(last_height)
    # Get the Worker_shares records in the window *for this user_id*
    this_workers_shares = [ws for ws in window if ws.user_id == user_id]
    print("This workers Shares records for the entire window: {}".format(len(this_workers_shares)))
    # Count this worker's valid solutions per graph size in the window
    valid_cnt = {}
    for worker_shares_rec in this_workers_shares:
        for shares in worker_shares_rec.shares:
            if shares.edge_bits not in valid_cnt:
                valid_cnt[shares.edge_bits] = 0
            valid_cnt[shares.edge_bits] += shares.valid
    # Calculate the gps for each graph size in the window
    all_gps = []
    for sz, cnt in valid_cnt.items():
        gps = lib.calculate_graph_rate(window[0].timestamp, window[-1].timestamp, cnt)
        all_gps.append((sz, gps, ))
    sys.stdout.flush()
    return all_gps
def initialize():
    """Bootstrap the Grin_stats table for a brand-new pool.

    The incremental stats calculation needs prior records to exist, so
    seed stats rows for blocks 0, 1 and 2 directly from the block data.
    """
    database = lib.get_db()
    # Height 0: difficulty is the genesis total difficulty
    genesis = Blocks.get_by_height(0)
    stat0 = Grin_stats(
        height=0,
        timestamp=genesis.timestamp,
        gps=0,
        difficulty=genesis.total_difficulty,
        total_utxoset_size=genesis.num_inputs)
    database.db.createDataObj(stat0)
    # Height 1: per-block difficulty = delta of the total difficulties;
    # utxo set size = previous total + outputs - inputs
    block1 = Blocks.get_by_height(1)
    stat1 = Grin_stats(
        height=1,
        timestamp=block1.timestamp,
        gps=0,
        difficulty=block1.total_difficulty - genesis.total_difficulty,
        total_utxoset_size=stat0.total_utxoset_size + block1.num_outputs - block1.num_inputs)
    database.db.createDataObj(stat1)
    # Height 2: same pattern, carrying the running utxo total forward
    block2 = Blocks.get_by_height(2)
    stat2 = Grin_stats(
        height=2,
        timestamp=block2.timestamp,
        gps=0,
        difficulty=block2.total_difficulty - block1.total_difficulty,
        total_utxoset_size=stat1.total_utxoset_size + block2.num_outputs - block2.num_inputs)
    database.db.createDataObj(stat2)
def calculate(height, avg_range):
    """Build the Pool_stats record for block `height`.

    Running totals are carried forward from the stats record at height-1;
    GPS is estimated from valid shares over the trailing `avg_range`
    blocks.  Raises AssertionError when prerequisite records are missing.
    """
    # Most recent pool stats record - source of the running totals
    previous_stats_record = Pool_stats.get_by_height(height - 1)
    assert previous_stats_record is not None, "No previous stats record found"
    range_start = max(height - avg_range, 1)
    avg_over_first_grin_block = Blocks.get_by_height(range_start)
    assert avg_over_first_grin_block is not None, "Missing grin block: {}".format(range_start)
    grin_block = Blocks.get_by_height(height)
    assert grin_block is not None, "Missing grin block: {}".format(height)
    # Share records: this block only, plus the whole averaging range.
    # If no shares exist for this height we still create the record now;
    # it will be recalculated when delayed share data arrives later.
    latest_worker_shares = Worker_shares.get_by_height(height)
    avg_over_worker_shares = Worker_shares.get_by_height(height, avg_range)
    # Defaults used when no share data is available
    timestamp = grin_block.timestamp
    difficulty = POOL_MIN_DIFF  # XXX TODO - enhance to support multiple difficulties
    gps = 0
    active_miners = 0
    shares_processed = 0
    num_shares_in_range = 0
    if len(avg_over_worker_shares) > 0:
        num_shares_in_range = sum(rec.valid for rec in avg_over_worker_shares)
        gps = grin.calculate_graph_rate(difficulty,
                                        avg_over_first_grin_block.timestamp,
                                        grin_block.timestamp,
                                        num_shares_in_range)
        print("XXX: difficulty={}, {}-{}, len={}".format(
            difficulty, avg_over_first_grin_block.timestamp,
            grin_block.timestamp, num_shares_in_range))
    if latest_worker_shares is not None:
        active_miners = len(latest_worker_shares)  # XXX NO, FIX THIS
        num_valid = sum(rec.valid for rec in latest_worker_shares)
        num_invalid = sum(rec.invalid for rec in latest_worker_shares)
        shares_processed = num_valid + num_invalid
    # Carry the running totals forward from the previous record
    total_shares_processed = previous_stats_record.total_shares_processed + shares_processed
    total_grin_paid = previous_stats_record.total_grin_paid  # XXX TODO
    total_blocks_found = previous_stats_record.total_blocks_found
    if Pool_blocks.get_by_height(height - 1) is not None:
        total_blocks_found += 1
    return Pool_stats(height=height,
                      timestamp=timestamp,
                      gps=gps,
                      active_miners=active_miners,
                      shares_processed=shares_processed,
                      total_shares_processed=total_shares_processed,
                      total_grin_paid=total_grin_paid,
                      total_blocks_found=total_blocks_found)
def get(self, height=0, range=None, fields=None):
    """Return block(s) as json.

    With no `range`: the single block at `height` (or None if unknown).
    With a `range`: a list of json blocks ending at `height`.
    Height 0 means "current chain tip".  `range` shadows the builtin;
    the name is kept for API compatibility.
    """
    database = lib.get_db()  # ensures the DB connection is initialized
    fields = lib.fields_to_list(fields)
    if height == 0:
        height = grin.get_current_height()
    # IDIOM: identity test for None instead of `== None`
    if range is None:
        block = Blocks.get_by_height(height)
        if block is None:
            return None
        return block.to_json(fields)
    else:
        blocks = []
        for block in Blocks.get_by_height(height, range):
            blocks.append(block.to_json(fields))
        return blocks
def get_reward_by_block(height):
    """Return the total miner reward for the block at `height`:
    base block reward (nanogrin) plus the block's fees, or 0 when the
    block is not in the DB."""
    block_rec = Blocks.get_by_height(height)
    if block_rec is None:
        return 0
    return get_block_reward_nanogrin() + block_rec.fee
def calculate(height, avg_range=DIFFICULTY_ADJUST_WINDOW):
    """Build a new Grin_stats record for block `height`.

    Uses the trailing `avg_range` blocks to compute per-size GPS
    estimates (via estimate_all_gps) and the per-block difficulty.
    Raises AssertionError when the previous stats record or any block
    in the range is missing.
    """
    previous_stats_record = Grin_stats.get_by_height(height - 1)
    print("XXX: {}".format(previous_stats_record))
    # BUGFIX: corrected typo "provious" in the assertion message
    assert previous_stats_record is not None, "No previous stats record found"
    recent_blocks = Blocks.get_by_height(height, avg_range)
    if len(recent_blocks) < min(avg_range, height):
        # We dont have all of these blocks in the DB
        raise AssertionError("Missing blocks in range: {}:{}".format(height - avg_range, height))
    assert recent_blocks[-1].height == height, "Invalid height in recent_blocks[-1]"
    assert recent_blocks[-2].height == height - 1, \
        "Invalid height in recent_blocks[-2]: {} vs {}".format(recent_blocks[-2].height, height - 1)
    # Per-block difficulty = delta of total difficulties of the last two blocks
    last_block = recent_blocks[-1]
    timestamp = last_block.timestamp
    difficulty = last_block.total_difficulty - recent_blocks[-2].total_difficulty
    new_stats = Grin_stats(
        height=height,
        timestamp=timestamp,
        difficulty=difficulty,
    )
    # Estimated GPS for each recently-seen edge_bits size
    all_gps = estimate_all_gps(recent_blocks)
    for gps in all_gps:
        gps_rec = Gps(
            edge_bits=gps[0],
            gps=gps[1],
        )
        new_stats.gps.append(gps_rec)
    return new_stats
def get_reward_by_block(height):
    """Return the total miner reward for the block at `height` in
    nanogrin: 60-grin base reward (1 grin = 1e9 nanogrin) plus the
    block's fees, or 0 when the block is not in the DB."""
    database = lib.get_db()  # ensures the DB connection is initialized
    block_rec = Blocks.get_by_height(height)
    if block_rec is None:
        return 0
    return 60 * 1000000000 + block_rec.fee
def main():
    """Validate recent DB blocks against the grin node.

    Walks the last `validation_depth` blocks, marking mismatched hashes
    as orphans and inserting any blocks missing from the DB.
    """
    global LOGGER
    global CONFIG
    CONFIG = lib.get_config()
    LOGGER = lib.get_logger(PROCESS)
    LOGGER.warn("=== Starting {}".format(PROCESS))

    # Connect to DB
    database = lib.get_db()

    grin_api_url = "http://" + CONFIG["grin_node"]["address"] + ":" + CONFIG["grin_node"]["api_port"]
    status_url = grin_api_url + "/v1/status"
    blocks_url = grin_api_url + "/v1/blocks/"
    validation_depth = int(CONFIG[PROCESS]["validation_depth"])

    response = requests.get(status_url)
    latest = int(response.json()["tip"]["height"])
    last = latest - validation_depth  # start a reasonable distance back
    if last < 0:
        last = 1
    LOGGER.warn("Starting from block #{}".format(last))

    for i in range(last, latest):
        url = blocks_url + str(i)
        response = requests.get(url).json()
        # print("{}: {}".format(response["header"]["height"], response["header"]["hash"]))
        try:
            # BUGFIX: get_by_height takes an int height; the original
            # passed the list [i], which does not look up block i.
            rec = Blocks.get_by_height(i)
            if rec is not None:
                # Existing record: check for an orphan not already marked
                if rec.hash != response["header"]["hash"] and rec.state != "orphan":
                    LOGGER.warn("Found an orphan - height: {}, hash: {} vs {}".format(
                        rec.height, rec.hash, response["header"]["hash"]))
                    rec.state = "orphan"
                    database.db.getSession().commit()
            else:
                # Not in the DB - add it now
                LOGGER.warn("Adding missing block - height: {}".format(response["header"]["height"]))
                # XXX TODO: Probably want to mark it as "missing" so we know it was filled in after the fact?
                missing_block = Blocks(hash=response["header"]["hash"],
                                       version=response["header"]["version"],
                                       height=response["header"]["height"],
                                       previous=response["header"]["previous"],
                                       timestamp=response["header"]["timestamp"][:-1],
                                       output_root=response["header"]["output_root"],
                                       range_proof_root=response["header"]["range_proof_root"],
                                       kernel_root=response["header"]["kernel_root"],
                                       nonce=response["header"]["nonce"],
                                       total_difficulty=response["header"]["total_difficulty"],
                                       total_kernel_offset=response["header"]["total_kernel_offset"],
                                       state="missing")
                database.db.createDataObj(missing_block)
        except Exception as e:
            # XXX TODO: Something more ?
            LOGGER.error("Something went wrong: {}".format(e))
        sys.stdout.flush()
    # db.set_last_run(PROCESS, str(time.time()))
    database.db.getSession().commit()
def calculate(height, avg_range):
    """Build Worker_stats records for every worker that submitted a
    share at block `height`.

    GPS is averaged over the trailing `avg_range` blocks; running totals
    come from each worker's most recent Worker_stats record.  Returns
    the list of new Worker_stats objects (not persisted here).
    """
    range_start = max(height - avg_range, 1)
    avg_over_first_grin_block = Blocks.get_by_height(range_start)
    assert avg_over_first_grin_block is not None, "Missing grin block: {}".format(range_start)
    grin_block = Blocks.get_by_height(height)
    assert grin_block is not None, "Missing grin block: {}".format(height)
    # Worker share records: this block, plus the whole averaging range
    latest_worker_shares = Worker_shares.get_by_height(height)
    # assert len(latest_worker_shares) != 0, "Missing worker shares record for height {}".format(height)
    avg_over_worker_shares = Worker_shares.get_by_height(height, avg_range)
    # One stats record per worker who submitted a share this block
    workers = list(set(rec.worker for rec in latest_worker_shares))
    new_stats = []
    for worker in workers:
        # Most recent stats record for this worker (for running totals)
        last_stat = Worker_stats.get_latest_by_id(worker)
        if last_stat is None:
            # A brand-new worker: synthesize a zeroed record at height-1
            last_stat = Worker_stats(None, datetime.utcnow(), height - 1,
                                     worker, 0, 0, 0, 0, 0, 0)
            new_stats.append(last_stat)
        timestamp = grin_block.timestamp
        difficulty = POOL_MIN_DIFF  # XXX TODO - enhance to support multiple difficulties
        num_shares_in_range = sum(rec.valid for rec in avg_over_worker_shares
                                  if rec.worker == worker)
        gps = grin.calculate_graph_rate(difficulty,
                                        avg_over_first_grin_block.timestamp,
                                        grin_block.timestamp,
                                        num_shares_in_range)
        # Each worker here is guaranteed a record in latest_worker_shares
        mine = [rec for rec in latest_worker_shares if rec.worker == worker]
        shares_processed = mine[0].valid + mine[0].invalid
        total_shares_processed = last_stat.total_shares_processed + shares_processed
        stats = Worker_stats(
            id=None,
            height=height,
            timestamp=timestamp,
            worker=worker,
            gps=gps,
            shares_processed=shares_processed,
            total_shares_processed=total_shares_processed,
            grin_paid=123,       # XXX TODO
            total_grin_paid=456, # XXX TODO
            balance=1)           # XXX TODO
        new_stats.append(stats)
    return new_stats
def calculate(height, window_size):
    """Build the Pool_stats record for block `height` from the share
    window of the trailing `window_size` blocks.

    Running totals come from the Pool_stats record at height-1; GPS
    estimates come from estimate_gps_for_all_sizes over the window.
    """
    # Most recent pool data - source of the running totals
    previous_stats_record = Pool_stats.get_by_height(height - 1)
    assert previous_stats_record is not None, "No previous Pool_stats record found"
    grin_block = Blocks.get_by_height(height)
    assert grin_block is not None, "Missing grin block: {}".format(height)
    window = Worker_shares.get_by_height(height, window_size)
    timestamp = grin_block.timestamp
    # Distinct users with shares anywhere in the window
    active_miners = len({rec.user_id for rec in window})
    print("active_miners = {}".format(active_miners))
    # Sum counts of every share size submitted for this block
    num_shares_processed = 0
    share_counts = {}
    for ws in Worker_shares.get_by_height(height):
        num_shares_processed += ws.num_shares()
        for size in ws.sizes():
            size_str = "{}{}".format("C", size)
            bucket = share_counts.setdefault(
                size_str, {"valid": 0, "invalid": 0, "stale": 0})
            share_counts[size_str] = {
                "valid": bucket["valid"] + ws.num_valid(size),
                "invalid": bucket["invalid"] + ws.num_invalid(size),
                "stale": bucket["stale"] + ws.num_stale(size),
            }
    print("num_shares_processed this block= {}".format(num_shares_processed))
    total_shares_processed = previous_stats_record.total_shares_processed + num_shares_processed
    total_blocks_found = previous_stats_record.total_blocks_found
    # Estimated GPS for all sizes with shares submitted
    all_gps = estimate_gps_for_all_sizes(window)
    if Pool_blocks.get_by_height(height - 1) is not None:
        total_blocks_found += 1
    new_stats = Pool_stats(
        height=height,
        timestamp=timestamp,
        active_miners=active_miners,
        share_counts=share_counts,
        shares_processed=num_shares_processed,
        total_blocks_found=total_blocks_found,
        total_shares_processed=total_shares_processed,
        dirty=False,
    )
    print("all_gps for all pool workers")
    pp.pprint(all_gps)
    for edge_bits, rate in all_gps:
        new_stats.gps.append(Gps(edge_bits=edge_bits, gps=rate))
    sys.stdout.flush()
    return new_stats
def addPoolBlock(logger, timestamp, height, hash, found_by, serverid):
    """Record a block found by the pool as a Pool_blocks row.

    Polls until the grin block appears in our DB (needed for nonce and
    difficulty), then inserts the record, ignoring duplicates.  The
    whole operation is serialized via POOLBLOCK_MUTEX.
    """
    global POOLBLOCK_MUTEX
    POOLBLOCK_MUTEX.acquire()
    database = lib.get_db()
    try:
        logger.warn(
            "Adding A PoolBlock: Timestamp: {}, ServerID: {}, Height: {}, Hash: {}"
            .format(timestamp, serverid, height, hash))
        # Poll until the grin block shows up in the DB
        chain_block = Blocks.get_by_height(height)
        while chain_block is None:
            chain_block = Blocks.get_by_height(height)
            time.sleep(1)
        actual_difficulty = grin.difficulty(chain_block.hash,
                                            chain_block.edge_bits,
                                            chain_block.secondary_scaling)
        net_difficulty = grin.get_network_difficulty(height)
        # Create the DB record
        pool_block_rec = Pool_blocks(hash=hash,
                                     height=height,
                                     nonce=chain_block.nonce,
                                     actual_difficulty=actual_difficulty,
                                     net_difficulty=net_difficulty,
                                     timestamp=timestamp,
                                     found_by=found_by,
                                     state="new")
        was_duplicate = lib.get_db().db.createDataObj_ignore_duplicates(pool_block_rec)
        if was_duplicate:
            logger.warn("Failed to add duplicate Pool Block: {}".format(height))
        else:
            logger.warn("Added Pool Block: {}".format(height))
    finally:
        POOLBLOCK_MUTEX.release()
def get(self, height=None, range=None, fields=None):
    """Return block(s) as json.

    Height None/0 selects the latest block(s).  With no `range`, a
    single json block (or None); with a `range`, a list of json blocks.
    `range` shadows the builtin; the name is kept for API compatibility.
    """
    LOGGER = lib.get_logger(PROCESS)
    LOGGER.warn("GrinAPI_blocks get height:{} range:{} fields:{}".format(
        height, range, fields))
    fields = lib.fields_to_list(fields)
    if height is None or height == 0:
        blocks = Blocks.get_latest(range)
    else:
        blocks = Blocks.get_by_height(height, range)
    # IDIOM: identity test for None instead of `== None`
    if range is None:
        if blocks is None:
            return None
        return blocks.to_json(fields)
    else:
        bl = []
        for block in blocks:
            bl.append(block.to_json(fields))
        return bl
def calculate(height, window_size):
    """Build the Pool_stats record for block `height` from the share
    window of the trailing `window_size` blocks (older schema variant)."""
    # Previous stats record supplies the running totals
    previous_stats_record = Pool_stats.get_by_height(height - 1)
    assert previous_stats_record is not None, "No previous Pool_stats record found"
    grin_block = Blocks.get_by_height(height)
    assert grin_block is not None, "Missing grin block: {}".format(height)
    window = Worker_shares.get_by_height(height, window_size)
    timestamp = grin_block.timestamp
    # Distinct workers with shares anywhere in the window
    active_miners = len({rec.worker for rec in window})
    print("active_miners = {}".format(active_miners))
    # Shares for this block = counts on the newest record in the window
    shares_processed = window[-1].num_shares() if len(window) > 0 else 0
    print("shares_processed this block= {}".format(shares_processed))
    total_shares_processed = previous_stats_record.total_shares_processed + shares_processed
    total_grin_paid = previous_stats_record.total_grin_paid  # XXX TODO
    total_blocks_found = previous_stats_record.total_blocks_found
    # Estimated GPS for all sizes with shares submitted
    all_gps = estimate_gps_for_all_sizes(window)
    if Pool_blocks.get_by_height(height - 1) is not None:
        total_blocks_found += 1
    new_stats = Pool_stats(
        height=height,
        timestamp=timestamp,
        active_miners=active_miners,
        shares_processed=shares_processed,
        total_blocks_found=total_blocks_found,
        total_shares_processed=total_shares_processed,
        total_grin_paid=total_grin_paid,
        dirty=False,
    )
    print("all_gps for all pool workers")
    pp.pprint(all_gps)
    for edge_bits, rate in all_gps:
        new_stats.gps.append(Gps(edge_bits=edge_bits, gps=rate))
    sys.stdout.flush()
    return new_stats
def calculate(height, avg_range):
    """Build a Grin_stats record for block `height` averaged over the
    trailing `avg_range` blocks.

    Raises AssertionError when the previous stats record or blocks in
    the range are missing.
    """
    previous_stats_record = Grin_stats.get_by_height(height - 1)
    print("XXX: {}".format(previous_stats_record))
    # BUGFIX: corrected typo "provious" in the assertion message
    assert previous_stats_record is not None, "No previous stats record found"
    recent_blocks = Blocks.get_by_height(height, avg_range)
    if len(recent_blocks) < min(avg_range, height):
        # We dont have all of these blocks in the DB
        raise AssertionError("Missing blocks in range: {}:{}".format(
            height - avg_range, height))
    # BUGFIX: removed debug prints of recent_blocks[-3]/[-4]; they raised
    # IndexError whenever the window held fewer than 4 blocks (allowed by
    # the min(avg_range, height) check at low heights).
    assert recent_blocks[-1].height == height, "Invalid height in recent_blocks[-1]"
    assert recent_blocks[-2].height == height - 1, \
        "Invalid height in recent_blocks[-2]: {} vs {}".format(
            recent_blocks[-2].height, height - 1)
    # Per-block difficulty = delta of the last two total difficulties
    first_block = recent_blocks[0]
    last_block = recent_blocks[-1]
    timestamp = last_block.timestamp
    difficulty = last_block.total_difficulty - recent_blocks[-2].total_difficulty
    gps = lib.calculate_graph_rate(difficulty, first_block.timestamp,
                                   last_block.timestamp, len(recent_blocks))
    # utxo set size = previous running total + outputs - inputs
    total_utxoset_size = (previous_stats_record.total_utxoset_size
                          + last_block.num_outputs - last_block.num_inputs)
    return Grin_stats(
        height=height,
        timestamp=timestamp,
        gps=gps,
        difficulty=difficulty,
        total_utxoset_size=total_utxoset_size,
    )
def main():
    """Validate recent DB blocks against the grin node: mark mismatched
    hashes as orphans and insert any blocks missing from the DB."""
    global LOGGER
    global CONFIG
    CONFIG = lib.get_config()
    LOGGER = lib.get_logger(PROCESS)
    LOGGER.warn("=== Starting {}".format(PROCESS))
    # Connect to DB
    database = lib.get_db()
    atexit.register(lib.teardown_db)
    validation_depth = int(CONFIG[PROCESS]["validation_depth"])

    latest = grin.get_current_height(
    ) - 10  # stop 10 blocks from current to avoid overrunning the blockWatcher
    last_block_record = Blocks.get_latest()
    if last_block_record == None:
        last_block_record_height = 0
    else:
        last_block_record_height = last_block_record.height
    # Validate whichever is further back: node tip minus depth, or our
    # newest DB record minus depth
    last = min(latest - validation_depth, last_block_record_height -
               validation_depth)  # start a reasonable distance back
    if last < 0:
        last = 0
    LOGGER.warn("Starting from block #{}".format(last))

    for i in range(last, latest + 1):
        if i % 100 == 0:
            LOGGER.warn("Processing #{}".format(i))
        # Blocking fetch of block i from the grin node
        response = grin.blocking_get_block_by_height(i)
        assert (response is not None)
        assert (int(response["header"]["height"]) == i)
        #print("{}: {}".format(response["header"]["height"], response["header"]["hash"]))
        try:
            database.db.initializeSession()
            rec = Blocks.get_by_height(
                i)  # Get existing entry from the DB (if any)
            if rec is not None:
                # Test if we have an orphan thats not already marked
                # Dont update any block info in the orphan, just mark the state
                if rec.hash != response["header"][
                        "hash"] and rec.state != "orphan":
                    LOGGER.warn(
                        "Found an orphan - height: {}, hash: {} vs {}".format(
                            rec.height, rec.hash, response["header"]["hash"]))
                    rec.state = "orphan"
                    database.db.getSession().commit()
            else:
                # If it was not in the DB then we should add it now
                LOGGER.warn("Adding missing block - height: {}".format(
                    response["header"]["height"]))
                missing_block = Blocks(
                    hash=response["header"]["hash"],
                    version=response["header"]["version"],
                    height=response["header"]["height"],
                    previous=response["header"]["previous"],
                    # NOTE(review): [:-1] strips the last char of the node's
                    # timestamp and the format ends in the literal "+00:0" -
                    # looks like it expects "...+00:00" input; confirm against
                    # the node's timestamp format before changing.
                    timestamp=datetime.strptime(
                        response["header"]["timestamp"][:-1],
                        "%Y-%m-%dT%H:%M:%S+00:0"),
                    output_root=response["header"]["output_root"],
                    range_proof_root=response["header"]["range_proof_root"],
                    kernel_root=response["header"]["kernel_root"],
                    nonce=response["header"]["nonce"],
                    edge_bits=response["header"]["edge_bits"],
                    total_difficulty=response["header"]["total_difficulty"],
                    secondary_scaling=response["header"]["secondary_scaling"],
                    num_inputs=len(response["inputs"]),
                    num_outputs=len(response["outputs"]),
                    num_kernels=len(response["kernels"]),
                    fee=sum(k["fee"] for k in response["kernels"]),
                    lock_height=response["kernels"][0]["lock_height"]
                    if len(response["kernels"]) > 0 else 0,
                    total_kernel_offset=response["header"]
                    ["total_kernel_offset"],
                    state="missing")
                database.db.createDataObj(missing_block)
        except Exception as e:
            LOGGER.error("Something went wrong: {} - {}".format(
                e, traceback.print_stack()))
            database.db.getSession().rollback()
        # Tear down the per-iteration DB session regardless of outcome
        database.db.destroySession()
        sys.stdout.flush()
        time.sleep(0.1)  # dont be too aggressive
    LOGGER.warn("=== Completed {}".format(PROCESS))
def calculate(height, window_size):
    """Build and persist Worker_stats records for block `height`.

    Covers every worker who either submitted a share in the trailing
    `window_size` blocks' estimation window or received a payment at this
    height.  Each worker's per-block share counts, running totals and
    per-size GPS estimates are stored.  Returns the list of created
    Worker_stats objects (already committed).
    """
    database = lib.get_db()
    grin_block = Blocks.get_by_height(height)
    assert grin_block is not None, "Missing grin block at height: {}".format(height)
    # Get all Worker_share records in the estimation window
    window = Worker_shares.get_by_height(height, window_size)
    # Get list of all workers who submitted shares OR recvd a payment at this height
    pmts = Pool_payment.get_by_height(height)
    shares_workers = [share.user_id for share in window]
    pmts_workers = [pmt.user_id for pmt in pmts]
    workers = list(set(shares_workers + pmts_workers))
    # Create a new Worker_stats record for each of these workers
    print("Calcualte worker stats for height {}, workers {}".format(height, workers))
    new_stats = []
    for worker in workers:
        # Get this workers most recent worker_stats record (for running totals)
        last_stat = Worker_stats.get_latest_by_id(worker)
        if last_stat is None:
            # A new worker, initialize a last_stat for the previous block
            last_stat = Worker_stats(
                timestamp=datetime.utcnow(),
                height=height - 1,
                user_id=worker)
            new_stats.append(last_stat)
        # Calculate this workers stats data
        timestamp = grin_block.timestamp
        # Caclulate estimated GPS for all sizes with shares submitted
        all_gps = estimate_gps_for_all_sizes(worker, window)
        # Per-block share counts; zero if this worker has no record at
        # this exact height (e.g. payment-only workers)
        print("Looking up shares for height {} user {}".format(height, worker))
        this_workers_shares_this_block = Worker_shares.get_by_height_and_id(height, worker)
        print("XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX {}".format(this_workers_shares_this_block))
        if this_workers_shares_this_block is None or len(this_workers_shares_this_block) == 0:
            this_workers_valid_shares = 0
            this_workers_invalid_shares = 0
            this_workers_stale_shares = 0
        else:
            # Use the newest record returned for this height
            this_workers_shares_this_block = this_workers_shares_this_block[-1]
            this_workers_valid_shares = this_workers_shares_this_block.num_valid()
            this_workers_invalid_shares = this_workers_shares_this_block.num_invalid()
            this_workers_stale_shares = this_workers_shares_this_block.num_stale()
        # Running totals = previous record's totals + this block's counts
        this_workers_total_valid_shares = last_stat.total_valid_shares + this_workers_valid_shares
        this_workers_total_invalid_shares = last_stat.total_invalid_shares + this_workers_invalid_shares
        this_workers_total_stale_shares = last_stat.total_stale_shares + this_workers_stale_shares
        # XXX PERFORAMCE = removed bulk insert to debug some other issue, need to put bulk insert back!!!
        stats = Worker_stats(
            height = height,
            timestamp = timestamp,
            user_id = worker,
            valid_shares = this_workers_valid_shares,
            invalid_shares = this_workers_invalid_shares,
            stale_shares = this_workers_stale_shares,
            total_valid_shares = this_workers_total_valid_shares,
            total_invalid_shares = this_workers_total_invalid_shares,
            total_stale_shares = this_workers_total_stale_shares,
        )
        # Commit the stats row first so the Gps children can attach to it
        database.db.getSession().add(stats)
        database.db.getSession().commit()
        #print("AAA: Created Worker_stats with id={}".format(stats.id))
        # print("all_gps for worker {}:".format(worker))
        # pp.pprint(all_gps)
        for gps_est in all_gps:
            gps_rec = Gps(
                edge_bits = gps_est[0],
                gps = gps_est[1],
            )
            stats.gps.append(gps_rec)
            #print("AAA: Appended gps record to Worker_stats: {}".format(gps_rec))
            # gps_rec.worker_stats_id = stats.id,
            # database.db.getSession().add(gps_rec)
        new_stats.append(stats)
        # Second add/commit persists the appended Gps children
        database.db.getSession().add(stats)
        database.db.getSession().commit()
    sys.stdout.flush()
    return new_stats
def calculate(height, window_size):
    """Build and persist Worker_stats records for block `height` for every
    worker who submitted a share in the trailing `window_size` blocks.

    Returns the list of new stats objects; NOTE(review): only the
    placeholder records for brand-new workers are appended to the return
    list here (the `new_stats.append(stats)` for regular records is
    commented out) - confirm callers rely on the DB rows rather than the
    return value.
    """
    database = lib.get_db()
    grin_block = Blocks.get_by_height(height)
    assert grin_block is not None, "Missing grin block at height: {}".format(
        height)
    # Get all Worker_share records in the estimation window
    window = Worker_shares.get_by_height(height, window_size)
    # Get list of all workers who submitted shares in the window
    workers = list(set([share.worker for share in window]))
    # Create a new Worker_stats record for each of these workers
    print("Calcualte worker stats for height {}, workers {}".format(
        height, workers))
    new_stats = []
    for worker in workers:
        # Get this workers most recent worker_stats record (for running totals)
        last_stat = Worker_stats.get_latest_by_id(worker)
        if last_stat is None:
            # A new worker, initialize a last_stat for the previous block
            last_stat = Worker_stats(None, datetime.utcnow(), height - 1,
                                     worker, 0, 0, 0, 0, 0, 0)
            new_stats.append(last_stat)
        # Calculate this workers stats data
        timestamp = grin_block.timestamp
        # Caclulate estimated GPS for all sizes with shares submitted
        all_gps = estimate_gps_for_all_sizes(worker, window)
        # Share counts for this block come from this worker's newest
        # record in the window
        this_workers_shares = [ws for ws in window if ws.worker == worker]
        num_shares_processed = this_workers_shares[-1].num_shares()
        print("num_shares_processed={}".format(num_shares_processed))
        total_shares_processed = last_stat.total_shares_processed + num_shares_processed
        print("total_shares_processed={}".format(total_shares_processed))
        # XXX PERFORAMCE = could not get bulk_insert to work...
        stats = Worker_stats(
            id=None,
            height=height,
            timestamp=timestamp,
            worker=worker,
            shares_processed=num_shares_processed,
            total_shares_processed=total_shares_processed,
            grin_paid=123,  # XXX TODO
            total_grin_paid=456,  # XXX TODO
            balance=1)  # XXX TODO
        # Commit the stats row first so the Gps children can attach to it
        database.db.getSession().add(stats)
        database.db.getSession().commit()
        #print("AAA: Created Worker_stats with id={}".format(stats.id))
        # print("all_gps for worker {}:".format(worker))
        # pp.pprint(all_gps)
        for gps_est in all_gps:
            gps_rec = Gps(
                edge_bits=gps_est[0],
                gps=gps_est[1],
            )
            stats.gps.append(gps_rec)
            #print("AAA: Appended gps record to Worker_stats: {}".format(gps_rec))
            # gps_rec.worker_stats_id = stats.id,
            # database.db.getSession().add(gps_rec)
        # new_stats.append(stats)
        # Second add/commit persists the appended Gps children
        database.db.getSession().add(stats)
        database.db.getSession().commit()
    sys.stdout.flush()
    return new_stats
def get_scale_by_block(height):
    """Return the secondary_scaling value of the block at `height`.

    NOTE(review): unlike get_reward_by_block there is no None guard -
    an unknown height raises AttributeError; confirm callers guarantee
    the block exists.
    """
    database = lib.get_db()  # ensures the DB connection is initialized
    block_rec = Blocks.get_by_height(height)
    return block_rec.secondary_scaling
def get_network_difficulty(height):
    """Network difficulty at `height`: the delta in total_difficulty
    between the block at `height` and the one before it."""
    pair = Blocks.get_by_height(height, 2)
    return pair[1].total_difficulty - pair[0].total_difficulty