def cleanup(chain, oldest_kept, simulate, empty=20):
    """ Deletes all redis share slices on `chain` older than the slice that
    solved the block identified by hash `oldest_kept`.

    simulate: truthy string/int; when true only log what would be deleted.
    empty: number of consecutive missing slices tolerated before assuming
    there is nothing older left to delete (default 20, preserving the
    previous hardcoded behavior). """
    # Translate the block hash into this chain's solve slice index
    for cp in Block.query.filter_by(hash=oldest_kept).one().chain_payouts:
        if cp.chainid == chain:
            oldest_kept = cp.solve_slice
            break

    # BUG FIX: previously always reported "chain_1_slice_index" regardless
    # of which chain was being cleaned
    current_app.logger.info("Current slice index {}".format(
        redis_conn.get("chain_{}_slice_index".format(chain))))
    current_app.logger.info(
        "Looking at all slices older than {}".format(oldest_kept))

    simulate = bool(int(simulate))
    if not simulate:
        # Destructive operation: require interactive confirmation
        if raw_input("Are you sure you want to continue? [y/n]") != "y":
            return

    empty_found = 0
    for i in xrange(oldest_kept, 0, -1):
        if empty_found >= empty:
            current_app.logger.info("{} empty in a row, exiting".format(empty))
            break
        key = "chain_{}_slice_{}".format(chain, i)

        # BUG FIX: slices may be lists (uncompressed) or hashes (compressed).
        # llen() raises WRONGTYPE on a hash, so use a type check to detect
        # missing keys instead, matching the other cleanup routines.
        if redis_conn.type(key) == 'none':
            empty_found += 1
        else:
            empty_found = 0

        if not simulate:
            current_app.logger.info("deleting {}!".format(key))
            current_app.logger.info(redis_conn.delete(key))
        else:
            current_app.logger.info("would delete {}".format(key))
def collect_minutes():
    """ Grabs all the pending minute shares out of redis and puts them in the
    database. Keys look like "min_<share_type>_<algo>_<stamp>" and hold a
    hash of user -> share count. """
    unproc_mins = redis_conn.keys("min_*")
    for key in unproc_mins:
        current_app.logger.info("Processing key {}".format(key))
        share_type, algo, stamp = key.split("_")[1:]
        minute = datetime.datetime.utcfromtimestamp(float(stamp))
        # To ensure invalid stamps don't get committed
        minute = ShareSlice.floor_time(minute, 0)
        # BUG FIX: stamp is a string, so in py2 `stamp < (time.time() - 30)`
        # was always False and the guard never fired; it was also inverted
        # relative to its message. Skip stamps *newer* than 30 seconds ago
        # since that minute may still be accumulating shares.
        if float(stamp) > (time.time() - 30):
            current_app.logger.info("Skipping timestamp {}, too young"
                                    .format(minute))
            continue

        # Atomically claim the key so concurrent runs don't double-count
        redis_conn.rename(key, "processing_shares")
        for user, shares in redis_conn.hgetall("processing_shares").iteritems():
            shares = float(shares)
            # messily parse out the worker/address combo...
            parts = user.split(".")
            # BUG FIX: split() always returns at least one element, so the
            # old `len(parts) > 0` test indexed parts[1] even when no worker
            # suffix was present, raising IndexError
            if len(parts) > 1:
                worker = parts[1]
            else:
                worker = ''
            address = parts[0]

            if not address.startswith("pool"):
                try:
                    curr = currencies.lookup_payable_addr(address)
                except InvalidAddressException:
                    curr = None

                # Unpayable addresses get credited to the pool payout address
                if curr is None:
                    address = global_config.pool_payout_currency.pool_payout_addr

            try:
                slc = ShareSlice(user=address, time=minute, worker=worker,
                                 algo=algo, share_type=share_type,
                                 value=shares, span=0)
                db.session.add(slc)
                db.session.commit()
            except sqlalchemy.exc.IntegrityError:
                # Row already exists for this (user, time, worker, algo,
                # share_type); lock it and accumulate instead
                db.session.rollback()
                slc = ShareSlice.query.with_lockmode('update').filter_by(
                    user=address, time=minute, worker=worker, algo=algo,
                    share_type=share_type).one()
                slc.value += shares
                db.session.commit()

        redis_conn.delete("processing_shares")
def collect_minutes():
    """ Grabs all the pending minute shares out of redis and puts them in the
    database. Keys look like "min_<share_type>_<algo>_<stamp>" and hold a
    hash of user -> share count. """
    unproc_mins = redis_conn.keys("min_*")
    for key in unproc_mins:
        current_app.logger.info("Processing key {}".format(key))
        share_type, algo, stamp = key.split("_")[1:]
        minute = datetime.datetime.utcfromtimestamp(float(stamp))
        # To ensure invalid stamps don't get committed
        minute = ShareSlice.floor_time(minute, 0)
        # BUG FIX: stamp is a string, so in py2 `stamp < (time.time() - 30)`
        # was always False and the guard never fired; it was also inverted
        # relative to its message. Skip stamps *newer* than 30 seconds ago
        # since that minute may still be accumulating shares.
        if float(stamp) > (time.time() - 30):
            current_app.logger.info("Skipping timestamp {}, too young"
                                    .format(minute))
            continue

        # Atomically claim the key so concurrent runs don't double-count
        redis_conn.rename(key, "processing_shares")
        for user, shares in redis_conn.hgetall("processing_shares").iteritems():
            shares = float(shares)
            # messily parse out the worker/address combo...
            parts = user.split(".")
            # BUG FIX: split() always returns at least one element, so the
            # old `len(parts) > 0` test indexed parts[1] even when no worker
            # suffix was present, raising IndexError
            if len(parts) > 1:
                worker = parts[1]
            else:
                worker = ''
            address = parts[0]

            if address != "pool":
                try:
                    curr = currencies.lookup_payable_addr(address)
                except InvalidAddressException:
                    curr = None

                # Unpayable addresses get credited to the pool payout address
                if not curr:
                    address = global_config.pool_payout_currency.pool_payout_addr

            try:
                slc = ShareSlice(user=address, time=minute, worker=worker,
                                 algo=algo, share_type=share_type,
                                 value=shares, span=0)
                db.session.add(slc)
                db.session.commit()
            except sqlalchemy.exc.IntegrityError:
                # Row already exists for this (user, time, worker, algo,
                # share_type); lock it and accumulate instead
                db.session.rollback()
                slc = ShareSlice.query.with_lockmode('update').filter_by(
                    user=address, time=minute, worker=worker, algo=algo,
                    share_type=share_type).one()
                slc.value += shares
                db.session.commit()

        redis_conn.delete("processing_shares")
def _grab_data(prefix, stat):
    """ Generic per-device stat collector: moves every redis key matching
    `prefix` into DeviceSlice rows recording `stat` (e.g. "hashrate").
    Hash fields are keyed "<address>_<worker>_<device_id>". """
    proc_name = "processing_{}".format(stat)
    unproc_mins = redis_conn.keys(prefix)
    for key in unproc_mins:
        current_app.logger.info("Processing key {}".format(key))
        try:
            (stamp, ) = key.split("_")[1:]
        except Exception:
            current_app.logger.error("Error processing key {}".format(key),
                                     exc_info=True)
            continue
        minute = datetime.datetime.utcfromtimestamp(float(stamp))
        # To ensure invalid stamps don't get committed
        minute = ShareSlice.floor_time(minute, 0)
        # BUG FIX: stamp is a string, so in py2 `stamp < (time.time() - 30)`
        # was always False and the guard never fired; it was also inverted
        # relative to its message. Skip stamps *newer* than 30 seconds ago
        # since that minute may still be accumulating data.
        if float(stamp) > (time.time() - 30):
            current_app.logger.info(
                "Skipping timestamp {}, too young".format(minute))
            continue

        # Atomically claim the key so concurrent runs don't double-count
        redis_conn.rename(key, proc_name)
        for user, value in redis_conn.hgetall(proc_name).iteritems():
            try:
                address, worker, did = user.split("_")
                try:
                    value = float(value)
                except ValueError:
                    # ppagent reports "None" for unavailable stats; anything
                    # else non-numeric is worth a warning
                    if value != "None":
                        current_app.logger.warn(
                            "Got bogus value {} from ppagent for stat {}"
                            .format(value, stat), exc_info=True)
                    continue

                # Megahashes are what cgminer reports; store raw hashes
                if stat == "hashrate":
                    value *= 1000000
            except Exception:
                current_app.logger.error("Error processing key {} on hash {}"
                                         .format(user, key), exc_info=True)
                continue

            try:
                slc = DeviceSlice(user=address, time=minute, worker=worker,
                                  device=did, stat=stat, value=value, span=0)
                db.session.add(slc)
                db.session.commit()
            except sqlalchemy.exc.IntegrityError:
                # Duplicate slice for this minute; best-effort, skip it
                current_app.logger.warn("SQLAlchemy collision", exc_info=True)
                db.session.rollback()

        redis_conn.delete(proc_name)
def cleanup(chain, oldest_kept, simulate, empty):
    """ Given the oldest block hash that you desire to hold shares for,
    delete everything older than it.

    simulate: truthy string/int; when true only log what would be deleted.
    empty: number of consecutive missing slices tolerated before assuming
    there is nothing older left to delete. """
    # Translate the block hash into this chain's solve slice index
    for cp in Block.query.filter_by(hash=oldest_kept).one().chain_payouts:
        if cp.chainid == chain:
            oldest_kept = cp.solve_slice
            break

    # BUG FIX: previously always reported "chain_1_slice_index" regardless
    # of which chain was being cleaned
    current_app.logger.info("Current slice index {}".format(
        redis_conn.get("chain_{}_slice_index".format(chain))))
    current_app.logger.info(
        "Looking at all slices older than {}".format(oldest_kept))

    simulate = bool(int(simulate))
    if not simulate:
        # Destructive operation: require interactive confirmation
        if raw_input("Are you sure you want to continue? [y/n]") != "y":
            return

    empty_found = 0
    for i in xrange(oldest_kept, 0, -1):
        if empty_found >= empty:
            # BUG FIX: message hardcoded "20" while the threshold is the
            # configurable `empty` parameter
            current_app.logger.info("{} empty in a row, exiting".format(empty))
            break
        key = "chain_{}_slice_{}".format(chain, i)

        # Missing keys report type 'none'; slices themselves may be lists
        # (uncompressed) or hashes (compressed)
        if redis_conn.type(key) == 'none':
            empty_found += 1
        else:
            empty_found = 0

        if not simulate:
            current_app.logger.info("deleting {}!".format(key))
            current_app.logger.info(redis_conn.delete(key))
        else:
            current_app.logger.info("would delete {}".format(key))
def cleanup(chain, oldest_kept, simulate, empty):
    """ Given the oldest block hash that you desire to hold shares for,
    delete everything older than it.

    simulate: truthy string/int; when true only log what would be deleted.
    empty: number of consecutive missing slices tolerated before assuming
    there is nothing older left to delete. """
    # Translate the block hash into this chain's solve slice index
    for cp in Block.query.filter_by(hash=oldest_kept).one().chain_payouts:
        if cp.chainid == chain:
            oldest_kept = cp.solve_slice
            break

    # BUG FIX: previously always reported "chain_1_slice_index" regardless
    # of which chain was being cleaned
    current_app.logger.info("Current slice index {}".format(
        redis_conn.get("chain_{}_slice_index".format(chain))))
    current_app.logger.info(
        "Looking at all slices older than {}".format(oldest_kept))

    simulate = bool(int(simulate))
    if not simulate:
        # Destructive operation: require interactive confirmation
        if raw_input("Are you sure you want to continue? [y/n]") != "y":
            return

    empty_found = 0
    for i in xrange(oldest_kept, 0, -1):
        if empty_found >= empty:
            # BUG FIX: message hardcoded "20" while the threshold is the
            # configurable `empty` parameter
            current_app.logger.info("{} empty in a row, exiting".format(empty))
            break
        key = "chain_{}_slice_{}".format(chain, i)

        # Missing keys report type 'none'; slices themselves may be lists
        # (uncompressed) or hashes (compressed)
        if redis_conn.type(key) == 'none':
            empty_found += 1
        else:
            empty_found = 0

        if not simulate:
            current_app.logger.info("deleting {}!".format(key))
            current_app.logger.info(redis_conn.delete(key))
        else:
            current_app.logger.info("would delete {}".format(key))
def credit_block(redis_key, simulate=False):
    """ Calculates credits for users from share records for the latest found
    block.

    Reads the solve metadata hash at `redis_key`, creates a Block row, builds
    a ChainPayout per sharechain that contributed, distributes the block
    value to users (honoring per-user settings, fees and donations), and
    commits unless `simulate` is exactly True. """
    # Don't do this truthiness thing
    if simulate is not True:
        simulate = False
    if simulate:
        current_app.logger.warn(
            "Running in simulate mode, no DB commit will be performed")
        current_app.logger.setLevel(logging.DEBUG)

    data = redis_conn.hgetall(redis_key)
    current_app.logger.debug("Processing block with details {}".format(data))
    merged = bool(int(data.get('merged', False)))

    # If start_time isn't listed explicitly do our best to derive from
    # statistical share records
    if 'start_time' in data:
        time_started = datetime.datetime.utcfromtimestamp(
            float(data.get('start_time')))
    else:
        time_started = last_block_time(data['algo'], merged=merged)

    # Solves with no fee information come through as the string "None"
    if data['fees'] == "None":
        data['fees'] = 0

    block = Block(
        user=data.get('address'),
        height=data['height'],
        total_value=(Decimal(data['total_subsidy']) / 100000000),
        transaction_fees=(Decimal(data['fees']) / 100000000),
        difficulty=bits_to_difficulty(data['hex_bits']),
        hash=data['hash'],
        time_started=time_started,
        currency=data['currency'],
        worker=data.get('worker'),
        found_at=datetime.datetime.utcfromtimestamp(float(data['solve_time'])),
        algo=data['algo'],
        merged=merged)
    db.session.add(block)
    db.session.flush()

    # Parse out chain results from the block key. Keys look like
    # "chain_<id>_<field>" with fields such as "shares" and "solve_index".
    chain_data = {}
    chain_default = {'shares': Decimal('0')}
    for key, value in data.iteritems():
        if not key.startswith("chain_"):
            continue
        _, chain_id, key = key.split("_", 2)
        chain_id = int(chain_id)
        chain = chain_data.setdefault(chain_id, chain_default.copy())
        chain['id'] = chain_id
        if key == "shares":
            value = Decimal(value)
        elif key == "solve_index":
            value = int(value)
        # XXX: Could do extra check for setting duplicate data (overrite) here
        chain[key] = value

    # Objectize the data. Use object to store all information moving forward.
    # Chains with zero shares are skipped entirely.
    chains = []
    for id, chain in chain_data.iteritems():
        if chain['shares'] == 0:
            continue
        cpo = ChainPayout(chainid=id, block=block,
                          solve_slice=chain['solve_index'],
                          chain_shares=chain['shares'])
        cpo.user_shares = {}
        cpo.credits = {}
        db.session.add(cpo)
        chains.append(cpo)
    # XXX: Would be good to check compositeprimarykey integrity here, but will
    # fail on other constraints
    #db.session.flush()

    # XXX: Change to a tabulate print
    current_app.logger.info("Parsed out chain data of {}".format(chain_data))

    # Distribute total block value among chains proportional to their shares
    share_distrib = {chain.chainid: chain.chain_shares for chain in chains}
    distrib = distributor(block.total_value, share_distrib)
    for chain in chains:
        chain.amount = distrib[chain.chainid]

    # Fetch the share distribution for this payout chain
    users = set()
    for chain in chains:
        # Actually fetch the shares from redis!
        chain.user_shares = chain.config_obj.calc_shares(chain)
        # If we have nothing, default to paying out the block finder everything
        if not chain.user_shares:
            chain.user_shares[block.user] = 1
        # Add the users to the set, no dups
        users.update(chain.user_shares.keys())
        # Record how many shares were used to payout
        chain.payout_shares = sum(chain.user_shares.itervalues())

    # Grab all possible user based settings objects for all chains
    custom_settings = {}
    if users:
        custom_settings = {s.user: s for s in UserSettings.query.filter(
            UserSettings.user.in_(users)).all()}

    # XXX: Double check that currency code lookups will work relying on
    # currency obj hashability
    # The currencies that are valid to pay out in from this block. Basically,
    # this block currency + all buyable currencies if this block's currency is
    # sellable
    valid_currencies = [block.currency_obj]
    if block.currency_obj.sellable is True:
        valid_currencies.extend(currencies.buyable_currencies)

    pool_payout = block.currency_obj.pool_payout

    def filter_valid(user, address, currency):
        # Resolve a currency code to a currency object and reroute anything
        # unpayable to the pool payout info
        try:
            if isinstance(currency, basestring):
                currency = currencies[currency]
        except KeyError:
            current_app.logger.debug(
                "Converted user {}, addr {}, currency {} => pool addr"
                " because invalid currency"
                .format(user, address, currency))
            return pool_payout
        if currency not in valid_currencies:
            current_app.logger.debug(
                "Converted user {}, addr {}, currency {} => pool addr"
                " because invalid currency"
                .format(user, address, currency))
            return pool_payout
        return dict(address=address, currency=currency, user=user)

    # Parse usernames and user settings to build appropriate credit objects
    for chain in chains:
        for username in chain.user_shares.keys():
            try:
                version = address_version(username)
            except Exception:
                # Give these shares to the pool, invalid address version
                chain.make_credit_obj(shares=chain.user_shares[username],
                                      **pool_payout)
                continue

            currency = currencies.version_map.get(version)
            # Check to see if we need to treat them real special :p
            settings = custom_settings.get(username)
            shares = chain.user_shares.pop(username)
            if settings:
                converted = settings.apply(
                    shares, currency, block.currency, valid_currencies)
                # Check to make sure no funny business
                assert sum(c[2] for c in converted) == shares, \
                    "Settings apply function returned bad stuff"
                # Create the separate payout objects from settings return info
                for address, currency, shares in converted:
                    chain.make_credit_obj(
                        shares=shares,
                        **filter_valid(username, address, currency))
            else:
                # (try to) Payout directly to mining address
                chain.make_credit_obj(
                    shares=shares,
                    **filter_valid(username, username, currency))

    # Calculate the portion that each user recieves
    for chain in chains:
        chain.distribute()

    # Another double check
    paid = 0
    fees_collected = 0
    donations_collected = 0
    for chain in chains:
        chain_fee_perc = chain.config_obj.fee_perc
        for key, credit in chain.credits.items():
            # don't try to payout users with zero payout
            if credit.amount == 0:
                db.session.expunge(credit)
                del chain.credits[key]
                continue

            # Skip fees/donations for the pool address
            if credit.user == pool_payout['user']:
                continue

            # To do a final check of payout amount
            paid += credit.amount

            # Fee/donation/bonus lookup
            fee_perc = chain_fee_perc
            donate_perc = Decimal('0')
            settings = custom_settings.get(credit.user)
            if settings:
                donate_perc = settings.pdonation_perc

            # Application
            assert isinstance(fee_perc, Decimal)
            assert isinstance(donate_perc, Decimal)
            fee_amount = credit.amount * fee_perc
            donate_amount = credit.amount * donate_perc
            credit.amount -= fee_amount
            credit.amount -= donate_amount

            # Recording
            credit.fee_perc = int(fee_perc * 100)
            credit.pd_perc = int(donate_perc * 100)

            # Bookkeeping
            donations_collected += donate_amount
            fees_collected += fee_amount

    if fees_collected > 0:
        p = Credit.make_credit(
            user=pool_payout['user'], block=block,
            currency=pool_payout['currency'].key, source=1,
            address=pool_payout['address'], amount=+fees_collected)
        db.session.add(p)

    if donations_collected > 0:
        p = Credit.make_credit(
            user=pool_payout['user'], block=block,
            currency=pool_payout['currency'].key, source=2,
            address=pool_payout['address'], amount=+donations_collected)
        db.session.add(p)

    current_app.logger.info("Collected {} {} in donation"
                            .format(donations_collected, block.currency))
    current_app.logger.info("Collected {} {} from fees"
                            .format(fees_collected, block.currency))
    current_app.logger.info(
        "Net swing from block {} {}"
        .format(fees_collected + donations_collected, block.currency))

    pool_key = (pool_payout['user'], pool_payout['address'],
                pool_payout['currency'])
    for chain in chains:
        if pool_key not in chain.credits:
            continue
        current_app.logger.info(
            "Collected {} from invalid mining addresses on chain {}"
            .format(chain.credits[pool_key].amount, chain.chainid))

    if not simulate:
        db.session.commit()
        redis_conn.delete(redis_key)
    else:
        db.session.rollback()
def chain_cleanup(chain, dont_simulate):
    """ Handles removing all redis share slices that we are fairly certain
    won't be needed to credit a block if one were to be solved in the future.

    chain: a sharechain config object (has .id, .currencies, .algo, etc).
    dont_simulate: when falsy, only log what would be deleted. """
    if not chain.currencies:
        current_app.logger.warn(
            "Unable to run share slice cleanup on chain {} since currencies "
            "aren't specified!".format(chain.id))
        return

    # Get the current sharechain index from redis
    current_index = int(redis_conn.get(
        "chain_{}_slice_index".format(chain.id)) or 0)
    if not current_index:
        current_app.logger.warn(
            "Index couldn't be determined for chain {}".format(chain.id))
        return

    # Find the maximum average difficulty of all currencies on this sharechain
    max_diff = 0
    max_diff_currency = None
    for currency in chain.currencies:
        currency_data = cache.get("{}_data".format(currency.key))
        if not currency_data or currency_data['difficulty_avg_stale']:
            current_app.logger.warn(
                "Cache doesn't accurate enough average diff for {} to cleanup chain {}"
                .format(currency, chain.id))
            return

        if currency_data['difficulty_avg'] > max_diff:
            max_diff = currency_data['difficulty_avg']
            max_diff_currency = currency

    assert max_diff != 0

    # Shares required (on average) to solve the hardest currency, padded by
    # the chain's safety margin (and last_n window for PPLNS chains)
    hashes_to_solve = max_diff * (2 ** 32)
    shares_to_solve = hashes_to_solve / chain.algo.hashes_per_share
    shares_to_keep = shares_to_solve * chain.safety_margin
    if chain.type == "pplns":
        shares_to_keep *= chain.last_n
    current_app.logger.info(
        "Keeping {:,} shares based on max diff {} for {} on chain {}"
        .format(shares_to_keep, max_diff, max_diff_currency, chain.id))

    # Walk backwards from the newest slice, tallying shares until we've seen
    # enough to safely credit a future solve
    found_shares = 0
    empty_slices = 0
    iterations = 0
    for index in xrange(current_index, -1, -1):
        iterations += 1
        slc_key = "chain_{}_slice_{}".format(chain.id, index)
        key_type = redis_conn.type(slc_key)

        # Fetch slice information
        if key_type == "list":
            empty_slices = 0
            # For speed sake, ignore uncompressed slices
            continue
        elif key_type == "hash":
            empty_slices = 0
            found_shares += float(redis_conn.hget(slc_key, "total_shares"))
        elif key_type == "none":
            empty_slices += 1
        else:
            raise Exception("Unexpected slice key type {}".format(key_type))

        if found_shares >= shares_to_keep or empty_slices >= 20:
            break

    if found_shares < shares_to_keep:
        current_app.logger.info(
            "Not enough shares {:,}/{:,} for cleanup on chain {}"
            .format(found_shares, shares_to_keep, chain.id))
        return

    current_app.logger.info("Found {:,} shares after {:,} iterations"
                            .format(found_shares, iterations))

    # Delete all share slices older than the last index found
    oldest_kept = index - 1
    empty_found = 0
    deleted_count = 0
    for index in xrange(oldest_kept, -1, -1):
        if empty_found >= 20:
            current_app.logger.debug("20 empty in a row, exiting")
            break
        # BUG FIX: key was formatted with the chain object instead of
        # chain.id, so it never matched real slice keys and the loop bailed
        # after 20 misses without deleting anything
        key = "chain_{}_slice_{}".format(chain.id, index)

        if redis_conn.type(key) == "none":
            empty_found += 1
        else:
            empty_found = 0

        if dont_simulate:
            if redis_conn.delete(key):
                deleted_count += 1
        else:
            current_app.logger.info("Would delete {}".format(key))

    if dont_simulate:
        current_app.logger.info(
            "Deleted {} total share slices from #{:,}->{:,}"
            .format(deleted_count, oldest_kept, index))
def credit_block(redis_key, simulate=False):
    """ Calculates credits for users from share records for the latest found
    block.

    Reads the solve metadata hash at `redis_key`, creates a Block row, builds
    a ChainPayout per sharechain that contributed, distributes the block
    value to users (honoring per-user settings, fees and donations), and
    commits unless `simulate` is exactly True. """
    # Don't do this truthiness thing
    if simulate is not True:
        simulate = False
    if simulate:
        current_app.logger.warn("Running in simulate mode, no commit will be performed")
        current_app.logger.setLevel(logging.DEBUG)

    data = redis_conn.hgetall(redis_key)
    current_app.logger.debug("Processing block with details {}".format(data))
    merged = bool(int(data.get('merged', False)))

    # If start_time isn't listed explicitly do our best to derive from
    # statistical share records
    if 'start_time' in data:
        time_started = datetime.datetime.utcfromtimestamp(
            float(data.get('start_time')))
    else:
        time_started = last_block_time(data['algo'], merged=merged)

    # BUG FIX: solves with no fee information come through as the string
    # "None" (the sibling credit_block variant handles this); without the
    # guard Decimal("None") raises InvalidOperation
    if data['fees'] == "None":
        data['fees'] = 0

    block = Block(
        user=data.get('address'),
        height=data['height'],
        total_value=(Decimal(data['total_subsidy']) / 100000000),
        transaction_fees=(Decimal(data['fees']) / 100000000),
        difficulty=bits_to_difficulty(data['hex_bits']),
        hash=data['hash'],
        time_started=time_started,
        currency=data['currency'],
        worker=data.get('worker'),
        found_at=datetime.datetime.utcfromtimestamp(float(data['solve_time'])),
        algo=data['algo'],
        merged=merged)
    db.session.add(block)
    db.session.flush()

    # Parse out chain results from the block key. Keys look like
    # "chain_<id>_<field>" with fields such as "shares" and "solve_index".
    chain_data = {}
    chain_default = {'shares': Decimal('0')}
    for key, value in data.iteritems():
        if key.startswith("chain_"):
            _, chain_id, key = key.split("_", 2)
            chain_id = int(chain_id)
            chain = chain_data.setdefault(chain_id, chain_default.copy())
            chain['id'] = chain_id
            if key == "shares":
                value = Decimal(value)
            elif key == "solve_index":
                value = int(value)
            # XXX: Could do extra check for setting duplicate data (overrite) here
            chain[key] = value

    # Objectize the data. Use object to store all information moving forward.
    # Chains with zero shares are skipped entirely.
    chains = []
    for id, chain in chain_data.iteritems():
        if chain['shares'] == 0:
            continue
        cpo = ChainPayout(chainid=id, block=block,
                          solve_slice=chain['solve_index'],
                          chain_shares=chain['shares'])
        cpo.user_shares = {}
        cpo.credits = {}
        db.session.add(cpo)
        chains.append(cpo)
    # XXX: Would be good to check compositeprimarykey integrity here, but will
    # fail on other constraints
    #db.session.flush()

    # XXX: Change to a tabulate print
    current_app.logger.info("Parsed out chain data of {}".format(chain_data))

    # Distribute total block value among chains proportional to their shares
    share_distrib = {chain.chainid: chain.chain_shares for chain in chains}
    distrib = distributor(block.total_value, share_distrib)
    for chain in chains:
        chain.amount = distrib[chain.chainid]

    # Fetch the share distribution for this payout chain
    users = set()
    for chain in chains:
        # Actually fetch the shares from redis!
        chain.user_shares = chain.config_obj.calc_shares(chain)
        # If we have nothing, default to paying out the block finder everything
        if not chain.user_shares:
            chain.user_shares[block.user] = 1
        # Add the users to the set, no dups
        users.update(chain.user_shares.keys())
        # Record how many shares were used to payout
        chain.payout_shares = sum(chain.user_shares.itervalues())

    # Grab all possible user based settings objects for all chains
    custom_settings = {}
    if users:
        custom_settings = {s.user: s for s in UserSettings.query.filter(
            UserSettings.user.in_(users)).all()}

    # XXX: Double check that currency code lookups will work relying on
    # currency obj hashability
    # The currencies that are valid to pay out in from this block. Basically,
    # this block currency + all exchangeable currencies if this block's
    # currency is also exchangeable
    valid_currencies = [block.currency_obj]
    if block.currency_obj.exchangeable is True:
        valid_currencies.extend(currencies.exchangeable_currencies)

    # Get the pools payout information for this block
    global_curr = global_config.pool_payout_currency
    pool_payout = dict(address=block.currency_obj.pool_payout_addr,
                       currency=block.currency_obj,
                       user=global_curr.pool_payout_addr)
    # If this currency has no payout address, switch to global default
    if pool_payout['address'] is None:
        pool_payout['address'] = global_curr.pool_payout_addr
        pool_payout['currency'] = global_curr
        assert block.currency_obj.exchangeable, "Block is un-exchangeable"

    # Double check valid. Paranoid
    address_version(pool_payout['address'])

    def filter_valid(user, address, currency):
        # Resolve a currency code to a currency object and reroute anything
        # unpayable to the pool payout info
        try:
            if isinstance(currency, basestring):
                currency = currencies[currency]
        except KeyError:
            current_app.logger.debug(
                "Converted user {}, addr {}, currency {} => pool addr"
                " because invalid currency"
                .format(user, address, currency))
            return pool_payout
        if currency not in valid_currencies:
            current_app.logger.debug(
                "Converted user {}, addr {}, currency {} => pool addr"
                " because invalid currency"
                .format(user, address, currency))
            return pool_payout
        return dict(address=address, currency=currency, user=user)

    # Parse usernames and user settings to build appropriate credit objects
    for chain in chains:
        for username in chain.user_shares.keys():
            try:
                version = address_version(username)
            except Exception:
                # Give these shares to the pool, invalid address version
                chain.make_credit_obj(shares=chain.user_shares[username],
                                      **pool_payout)
                continue

            currency = currencies.version_map.get(version)
            # Check to see if we need to treat them real special :p
            settings = custom_settings.get(username)
            shares = chain.user_shares.pop(username)
            if settings:
                converted = settings.apply(
                    shares, currency, block.currency, valid_currencies)
                # Check to make sure no funny business
                assert sum(c[2] for c in converted) == shares, \
                    "Settings apply function returned bad stuff"
                # Create the separate payout objects from settings return info
                for address, currency, shares in converted:
                    chain.make_credit_obj(
                        shares=shares,
                        **filter_valid(username, address, currency))
            else:
                # (try to) Payout directly to mining address
                chain.make_credit_obj(
                    shares=shares,
                    **filter_valid(username, username, currency))

    # Calculate the portion that each user recieves
    for chain in chains:
        chain.distribute()

    # Another double check
    paid = 0
    fees_collected = 0
    donations_collected = 0
    for chain in chains:
        chain_fee_perc = chain.config_obj.fee_perc
        for key, credit in chain.credits.items():
            # don't try to payout users with zero payout
            if credit.amount == 0:
                db.session.expunge(credit)
                del chain.credits[key]
                continue

            # Skip fees/donations for the pool address
            if credit.user == pool_payout['user']:
                continue

            # To do a final check of payout amount
            paid += credit.amount

            # Fee/donation/bonus lookup
            fee_perc = chain_fee_perc
            donate_perc = Decimal('0')
            settings = custom_settings.get(credit.user)
            if settings:
                donate_perc = settings.pdonation_perc

            # Application
            assert isinstance(fee_perc, Decimal)
            assert isinstance(donate_perc, Decimal)
            fee_amount = credit.amount * fee_perc
            donate_amount = credit.amount * donate_perc
            credit.amount -= fee_amount
            credit.amount -= donate_amount

            # Recording
            credit.fee_perc = int(fee_perc * 100)
            credit.pd_perc = int(donate_perc * 100)

            # Bookkeeping
            donations_collected += donate_amount
            fees_collected += fee_amount

    if fees_collected > 0:
        p = Credit.make_credit(
            user=pool_payout['user'], block=block,
            currency=pool_payout['currency'].key, source=1,
            address=pool_payout['address'], amount=+fees_collected)
        db.session.add(p)

    if donations_collected > 0:
        p = Credit.make_credit(
            user=pool_payout['user'], block=block,
            currency=pool_payout['currency'].key, source=2,
            address=pool_payout['address'], amount=+donations_collected)
        db.session.add(p)

    current_app.logger.info("Collected {} in donation".format(donations_collected))
    current_app.logger.info("Collected {} from fees".format(fees_collected))
    current_app.logger.info("Net swing from block {}"
                            .format(fees_collected + donations_collected))

    pool_key = (pool_payout['user'], pool_payout['address'],
                pool_payout['currency'])
    for chain in chains:
        if pool_key not in chain.credits:
            continue
        current_app.logger.info(
            "Collected {} from invalid mining addresses on chain {}"
            .format(chain.credits[pool_key].amount, chain.chainid))

    if not simulate:
        db.session.commit()
        redis_conn.delete(redis_key)
    else:
        db.session.rollback()