def setup(on_startup):
    '''Setup mining service internal environment.

    You should not need to change this. If you want to use another
    Worker manager or Share manager, you should set proper reference to
    Interfaces class *before* you call setup() in the launcher script.
    '''
    from stratum import settings
    from interfaces import Interfaces

    # Let's wait until share manager and worker manager boot up
    (yield Interfaces.share_manager.on_load)
    (yield Interfaces.worker_manager.on_load)

    from lib.block_updater import BlockUpdater
    from lib.template_registry import TemplateRegistry
    from lib.bitcoin_rpc import BitcoinRPC
    from lib.block_template import BlockTemplate
    from lib.coinbaser import SimpleCoinbaser

    bitcoin_rpc = BitcoinRPC(settings.NOVACOIN_TRUSTED_HOST,
                             settings.NOVACOIN_TRUSTED_PORT,
                             settings.NOVACOIN_TRUSTED_USER,
                             settings.NOVACOIN_TRUSTED_PASSWORD)

    import stratum.logger
    log = stratum.logger.get_logger('mining')

    # Poll until the daemon answers getblocktemplate with a dict.
    log.info('Waiting for novacoin RPC...')
    while True:
        try:
            result = (yield bitcoin_rpc.getblocktemplate())
            if isinstance(result, dict):
                log.info('Response from novacoin RPC OK')
                break
        # FIX: was a bare "except:", which also swallowed GeneratorExit /
        # KeyboardInterrupt (breaking cancellation of this inlineCallbacks
        # generator) and hid every RPC failure silently.
        except Exception:
            log.error('novacoin RPC not ready yet, retrying in 1s',
                      exc_info=True)
            # NOTE(review): time.sleep() blocks the reactor thread; assumed
            # acceptable during startup only. 'time' must be imported at
            # module level (original relied on the same).
            time.sleep(1)

    coinbaser = SimpleCoinbaser(bitcoin_rpc, settings.CENTRAL_WALLET)
    (yield coinbaser.on_load)

    registry = TemplateRegistry(BlockTemplate, coinbaser, bitcoin_rpc,
                                settings.INSTANCE_ID,
                                MiningSubscription.on_template,
                                Interfaces.share_manager.on_network_block)

    # Template registry is the main interface between Stratum service
    # and pool core logic
    Interfaces.set_template_registry(registry)

    # Set up polling mechanism for detecting new block on the network
    # This is just failsafe solution when -blocknotify
    # mechanism is not working properly
    BlockUpdater(registry, bitcoin_rpc)

    log.info("MINING SERVICE IS READY")
    on_startup.callback(True)
def __init__(self):
    # Build the pool of coin-daemon RPC connections from settings.
    log.debug(u"Got to Bitcoin RPC Manager")
    self.conns = {}
    # Connection 0 is always the primary daemon from the base settings.
    self.conns[0] = BitcoinRPC(settings.COINDAEMON_TRUSTED_HOST,
                               settings.COINDAEMON_TRUSTED_PORT,
                               settings.COINDAEMON_TRUSTED_USER,
                               settings.COINDAEMON_TRUSTED_PASSWORD)
    # Index of the connection currently in use.
    self.curr_conn = 0
    # Optional backups COINDAEMON_TRUSTED_*_1 .. _98: a backup is added only
    # when all four of its settings (host/port/user/password) are present.
    for x in xrange(1, 99):
        if hasattr(settings, u'COINDAEMON_TRUSTED_HOST_' + unicode(x)) and hasattr(
                settings, u'COINDAEMON_TRUSTED_PORT_' + unicode(x)) and hasattr(
                settings, u'COINDAEMON_TRUSTED_USER_' + unicode(x)) and hasattr(
                settings, u'COINDAEMON_TRUSTED_PASSWORD_' + unicode(x)):
            self.conns[len(self.conns)] = BitcoinRPC(
                settings.__dict__[u'COINDAEMON_TRUSTED_HOST_' + unicode(x)],
                settings.__dict__[u'COINDAEMON_TRUSTED_PORT_' + unicode(x)],
                settings.__dict__[u'COINDAEMON_TRUSTED_USER_' + unicode(x)],
                settings.__dict__[u'COINDAEMON_TRUSTED_PASSWORD_' + unicode(x)])
def __init__(self, host=None, port=None, user=None, passwd=None):
    """Collect coin-daemon RPC connections.

    When any of host/port/user/passwd is missing, connection 0 is built
    from the COINDAEMON_TRUSTED_* settings and numbered backups
    (COINDAEMON_TRUSTED_*_1 .. _98) are appended when fully configured.
    Otherwise a single connection is created from the explicit arguments.
    """
    log.debug("Got to Bitcoin RPC Manager")
    self.conns = {}
    explicit = not (host is None or port is None
                    or user is None or passwd is None)
    if explicit:
        self.conns[0] = BitcoinRPC(host, port, user, passwd)
    else:
        self.conns[0] = BitcoinRPC(settings.COINDAEMON_TRUSTED_HOST,
                                   settings.COINDAEMON_TRUSTED_PORT,
                                   settings.COINDAEMON_TRUSTED_USER,
                                   settings.COINDAEMON_TRUSTED_PASSWORD)
        for idx in range(1, 99):
            # A backup slot counts only when all four settings exist.
            names = ['COINDAEMON_TRUSTED_%s_%d' % (part, idx)
                     for part in ('HOST', 'PORT', 'USER', 'PASSWORD')]
            if all(hasattr(settings, name) for name in names):
                self.conns[len(self.conns)] = BitcoinRPC(
                    *(settings.__dict__[name] for name in names))
    # Index of the connection currently in use.
    self.curr_conn = 0
def setup(on_startup):
    '''Setup mining service internal environment.

    You should not need to change this. If you want to use another
    Worker manager or Share manager, you should set proper reference to
    Interfaces class *before* you call setup() in the launcher script.
    '''
    from stratum import settings

    # Get logging online as soon as possible
    import stratum.logger
    log = stratum.logger.get_logger('mining')

    from interfaces import Interfaces

    # Let's wait until share manager and worker manager boot up
    (yield Interfaces.share_manager.on_load)
    (yield Interfaces.worker_manager.on_load)

    from lib.block_updater import BlockUpdater
    from lib.template_registry import TemplateRegistry
    from lib.bitcoin_rpc import BitcoinRPC
    from lib.block_template import BlockTemplate
    from lib.coinbaser import SimpleCoinbaser

    bitcoin_rpc = BitcoinRPC(settings.BITCOIN_TRUSTED_HOST,
                             settings.BITCOIN_TRUSTED_PORT,
                             settings.BITCOIN_TRUSTED_USER,
                             settings.BITCOIN_TRUSTED_PASSWORD)

    # Check bitcoind:
    #   - getblocktemplate is available (die if not)
    #   - we are not still downloading the blockchain (sleep and retry)
    log.info("Connecting to bitcoind...")
    while True:
        try:
            result = (yield bitcoin_rpc.getblocktemplate())
            if isinstance(result, dict):
                # FIX: was "== 2", which made any *newer* template version
                # spin forever in a busy loop with no log and no sleep.
                if result['version'] >= 2:
                    break
                log.error("Unsupported getblocktemplate version: %s",
                          result['version'])
        except Exception as e:
            # FIX: e[2] relied on Exception.__getitem__ (Py2-only) and
            # raised IndexError when fewer args were present; e.args[2]
            # plus a length guard is equivalent and robust.
            # NOTE(review): assumes the RPC layer packs the raw HTTP body
            # into args[2] -- verify against lib.bitcoin_rpc.
            if len(e.args) > 2 and isinstance(e.args[2], str):
                # FIX: unguarded json.loads could itself raise out of the
                # handler and kill the startup generator.
                try:
                    error = json.loads(e.args[2])['error']['message']
                except (ValueError, KeyError, TypeError):
                    error = None
                if error == "Method not found":
                    log.error("Bitcoind does not support getblocktemplate!!! (time to upgrade.)")
                    reactor.stop()
                elif error == "Bitcoin is downloading blocks...":
                    log.error("Bitcoind downloading blockchain... will check back in 30 sec")
                    time.sleep(29)
                elif error is not None:
                    log.error("Bitcoind Error: %s", error)
        time.sleep(1)  # If we didn't get a result or the connect failed
def setup(on_startup):
    '''Setup mining service internal environment. You should not need to
    change this. If you want to use another Worker manager or Share manager,
    you should set proper reference to Interfaces class *before* you call
    setup() in the launcher script.'''
    from stratum import settings

    # Get logging online as soon as possible
    import stratum.logger
    log = stratum.logger.get_logger('mining')

    from interfaces import Interfaces

    # Let's wait until share manager and worker manager boot up
    (yield Interfaces.share_manager.on_load)
    (yield Interfaces.worker_manager.on_load)

    from lib.block_updater import BlockUpdater
    from lib.template_registry import TemplateRegistry
    from lib.bitcoin_rpc import BitcoinRPC
    from lib.block_template import BlockTemplate
    from lib.coinbaser import SimpleCoinbaser

    bitcoin_rpc = BitcoinRPC(settings.BITCOIN_TRUSTED_HOST,
                             settings.BITCOIN_TRUSTED_PORT,
                             settings.BITCOIN_TRUSTED_USER,
                             settings.BITCOIN_TRUSTED_PASSWORD)

    # Check bitcoind
    # Check we can connect (sleep)
    # Check the results:
    #   - getblocktemplate is avalible (Die if not)
    #   - we are not still downloading the blockchain (Sleep)
    log.info("Connecting to bitcoind...")
    while True:
        try:
            result = (yield bitcoin_rpc.getblocktemplate())
            if isinstance(result, dict):
                # NOTE(review): strict "== 2" silently busy-loops on any
                # newer template version (no log, no sleep) -- confirm the
                # daemon always reports version 2 here.
                if result['version'] == 2:
                    break
        except Exception, e:
            # NOTE(review): e[2] presumably carries the raw JSON-RPC error
            # body from lib.bitcoin_rpc -- verify; it raises IndexError when
            # fewer exception args are present.
            if isinstance(e[2], str):
                if isinstance(json.loads(e[2])['error']['message'], str):
                    error = json.loads(e[2])['error']['message']
                    if error == "Method not found":
                        # getblocktemplate unsupported: fatal, stop the reactor.
                        log.error("Bitcoind does not support getblocktemplate!!! (time to upgrade.)")
                        reactor.stop()
                    elif error == "Bitcoin is downloading blocks...":
                        # Initial block download: back off for ~30s total
                        # (29s here + the 1s at the bottom of the loop).
                        log.error("Bitcoind downloading blockchain... will check back in 30 sec")
                        time.sleep(29)
                    else:
                        log.error("Bitcoind Error: %s", error)
        time.sleep(1)  # If we didn't get a result or the connect failed
def __init__(self):
    """Build the primary coin-daemon RPC connection plus numbered backups.

    Connection 0 always comes from the base COINDAEMON_TRUSTED_* settings;
    backups COINDAEMON_TRUSTED_*_1 .. _98 are appended only when all four
    of their settings (host/port/user/password) are configured.
    """
    self.conns = {
        0: BitcoinRPC(settings.COINDAEMON_TRUSTED_HOST,
                      settings.COINDAEMON_TRUSTED_PORT,
                      settings.COINDAEMON_TRUSTED_USER,
                      settings.COINDAEMON_TRUSTED_PASSWORD),
    }
    # Index of the connection currently in use.
    self.curr_conn = 0
    for idx in range(1, 99):
        names = ['COINDAEMON_TRUSTED_%s_%d' % (part, idx)
                 for part in ('HOST', 'PORT', 'USER', 'PASSWORD')]
        if all(hasattr(settings, name) for name in names):
            self.conns[len(self.conns)] = BitcoinRPC(
                *(settings.__dict__[name] for name in names))
def __init__(self):
    # Fire deferred when manager is ready
    self.on_load = defer.Deferred()
    # Monotonic counter handed out to newly authorized worker sessions.
    self._last_worker_session_id = 0
    # Watchdog periodically invokes push_shares() to flush buffered shares.
    self._watchdog = PushWatchdog(self.push_shares)
    self._bitcoin_rpc = BitcoinRPC(config.BITCOIN_TRUSTED_HOST,
                                   config.BITCOIN_TRUSTED_PORT,
                                   config.BITCOIN_TRUSTED_USER,
                                   config.BITCOIN_TRUSTED_PASSWORD)
    self._db_share_backend = DbShareBackend()
    self._sink_share_backend = SinkShareBackend()
    # Kick off async load of the current round's block info; presumably
    # fires on_load when done -- confirm in load_block_info().
    self.load_block_info()
def __init__(self, host=None, port=None, user=None, passwd=None):
    """Build the merged-mining daemon connection pool.

    Without explicit credentials, connection 0 comes from the MERGED_*
    settings and numbered backups (MERGED_*_1 .. _98) are appended when
    fully configured. With explicit credentials, a single connection is
    created from the arguments.
    """
    self.conns = {}
    explicit = not (host is None or port is None
                    or user is None or passwd is None)
    if explicit:
        self.conns[0] = BitcoinRPC(host, port, user, passwd)
    else:
        self.conns[0] = BitcoinRPC(settings.MERGED_HOST,
                                   settings.MERGED_PORT,
                                   settings.MERGED_USER,
                                   settings.MERGED_PASSWORD)
        for idx in range(1, 99):
            # A backup slot counts only when all four settings exist.
            names = ['MERGED_%s_%d' % (part, idx)
                     for part in ('HOST', 'PORT', 'USER', 'PASSWORD')]
            if all(hasattr(settings, name) for name in names):
                self.conns[len(self.conns)] = BitcoinRPC(
                    *(settings.__dict__[name] for name in names))
    # Index of the connection currently in use.
    self.curr_conn = 0
def setup(on_startup):
    '''Setup mining service internal environment. You should not need to
    change this. If you want to use another Worker manager or Share manager,
    you should set proper reference to Interfaces class *before* you call
    setup() in the launcher script.'''
    from stratum import settings
    from interfaces import Interfaces

    # Let's wait until share manager and worker manager boot up
    (yield Interfaces.share_manager.on_load)
    (yield Interfaces.worker_manager.on_load)

    from lib.block_updater import BlockUpdater
    from lib.template_registry import TemplateRegistry
    from lib.bitcoin_rpc import BitcoinRPC
    from lib.block_template import BlockTemplate
    from lib.coinbaser import SimpleCoinbaser

    bitcoin_rpc = BitcoinRPC(settings.BITCOIN_TRUSTED_HOST,
                             settings.BITCOIN_TRUSTED_PORT,
                             settings.BITCOIN_TRUSTED_USER,
                             settings.BITCOIN_TRUSTED_PASSWORD)

    import stratum.logger
    log = stratum.logger.get_logger('mining')

    # Poll until the daemon answers getblocktemplate with a dict.
    # NOTE(review): time.sleep() blocks the reactor thread -- presumably
    # acceptable during startup only.
    log.info('Waiting for bitcoin RPC...')
    while True:
        try:
            result = (yield bitcoin_rpc.getblocktemplate())
            if isinstance(result, dict):
                log.info('Response from bitcoin RPC OK')
                break
        except Exception, e:
            log.info(str(e))
            time.sleep(1)
def __init__(self):
    # Fire deferred when manager is ready
    self.on_load = defer.Deferred()
    # Monotonic counter handed out to newly authorized worker sessions.
    self._last_worker_session_id = 0
    # Watchdog periodically invokes push_shares() to flush buffered shares.
    self._watchdog = PushWatchdog(self.push_shares)
    self._bitcoin_rpc = BitcoinRPC(
        config.BITCOIN_TRUSTED_HOST,
        config.BITCOIN_TRUSTED_PORT,
        config.BITCOIN_TRUSTED_USER,
        config.BITCOIN_TRUSTED_PASSWORD,
    )
    self._db_share_backend = DbShareBackend()
    self._sink_share_backend = SinkShareBackend()
    # Kick off async load of the current round's block info; presumably
    # fires on_load when done -- confirm in load_block_info().
    self.load_block_info()
def add_connection(self, host, port, user, password):
    """Append one more daemon RPC connection to the pool."""
    # TODO: Some string sanity checks
    next_slot = len(self.conns)
    self.conns[next_slot] = BitcoinRPC(host, port, user, password)
class ShareManager(object):
    """Tracks per-worker share statistics and forwards shares/blocks to the
    DB and sink backends. Rates are kept as windowed simple moving averages
    (WSMA) over a short and a long period."""

    def __init__(self):
        # Fire deferred when manager is ready
        self.on_load = defer.Deferred()
        # Monotonic counter handed out to newly authorized worker sessions.
        self._last_worker_session_id = 0
        # Watchdog periodically invokes push_shares() to flush buffered shares.
        self._watchdog = PushWatchdog(self.push_shares)
        self._bitcoin_rpc = BitcoinRPC(config.BITCOIN_TRUSTED_HOST,
                                       config.BITCOIN_TRUSTED_PORT,
                                       config.BITCOIN_TRUSTED_USER,
                                       config.BITCOIN_TRUSTED_PASSWORD)
        self._db_share_backend = DbShareBackend()
        self._sink_share_backend = SinkShareBackend()
        # Kicks off async load of current block info; fires on_load when done.
        self.load_block_info()

    def get_db_share_backend(self):
        """Return the database share backend instance."""
        return self._db_share_backend

    def on_network_block(self, prevhash):
        """Prints when there's new block coming from the network (possibly new round)"""
        # Refresh info about current round (another instance mined block?)
        self.load_block_info()

    @defer.inlineCallbacks
    def load_block_info(self, _=None):
        """Load the current round's block id/start time from the DB and pass
        it to the DB backend; fires on_load the first time it completes."""
        # Load current block_id from db
        (block_id, date_started) = (yield dbquery.get_block_info())
        block_started_secs = int(time.mktime(date_started.utctimetuple()))
        self._db_share_backend.set_block_info(block_id, block_started_secs)
        # We're ready!
        if not self.on_load.called:
            self.on_load.callback(True)

    @staticmethod
    def initialize_session(session):
        """Ensure the session carries a per-worker stats mapping."""
        if not session.get('SM_worker_stats'):
            session['SM_worker_stats'] = {}

    def initialize_worker_stats(self, session, worker_id, worker_name):
        """Create and register a fresh stats record for one worker in this
        session. Returns the stats dict."""
        now = posix_secs()
        self._last_worker_session_id += 1
        stats = {'session_id': session['session_id'],
                 'worker_id': worker_id,
                 'worker_name': worker_name,
                 'worker_session_id': self._last_worker_session_id,
                 'authorized_at': now,
                 'stale_submits': 0,
                 'old_submits': 0,
                 'invalid_submits': 0,
                 'valid_submits': 0,
                 'valid_shares': 0,
                 'last_valid_share': None,
                 # WSMA state: previous window rate, current window start,
                 # shares accumulated in the current window, derived rate.
                 'wsma_prev_rate_short': None,
                 'wsma_prev_rate_long': None,
                 'wsma_start_time_short': now,
                 'wsma_start_time_long': now,
                 'wsma_shares_short': 0,
                 'wsma_shares_long': 0,
                 'wsma_rate_short': 0.,
                 'wsma_rate_long': 0.}
        session['SM_worker_stats'][worker_name] = stats
        return stats

    @staticmethod
    def get_worker_stats(session, worker_name):
        """Look up the stats record for one worker; raises KeyError if absent."""
        return session['SM_worker_stats'][worker_name]

    @staticmethod
    def _compute_wsma(worker_stats, shares, submit_time, time_period,
                      shares_key, start_time_key, prev_rate_key):
        """Update and return the windowed simple moving average (shares/sec)
        for one period, blending the previous window's rate with the shares
        accumulated in the currently growing window."""
        worker_stats[shares_key] += shares
        curr_window = submit_time - worker_stats[start_time_key] + 1
        prev_rate = worker_stats[prev_rate_key]
        # First period for the miner
        if not prev_rate:
            prev_rate = 0
            prev_window = 0
        # Any next period when some previous exists
        else:
            prev_window = time_period - curr_window
            if prev_window < 0:
                prev_window = 0
        # Calculate the new partly from the previous window and partly form the
        # currently growing one
        new_rate = float((prev_rate * prev_window) + worker_stats[shares_key]) / \
            (prev_window + curr_window)
        # Did we finished a complete computation window?
        if curr_window > time_period:
            worker_stats[shares_key] = shares
            worker_stats[start_time_key] = submit_time
            worker_stats[prev_rate_key] = new_rate
        return new_rate

    def _update_stats_by_submit(self, worker_stats, shares, submit_secs):
        """
        When shares = 0 then we don't count it as a valid shares submission,
        only rate stats are updated.
        """
        worker_stats['wsma_rate_short'] = \
            self._compute_wsma(worker_stats, shares, submit_secs,
                               config.SHARE_MANAGER_SHORT_WSMA_PERIOD_S,
                               'wsma_shares_short', 'wsma_start_time_short',
                               'wsma_prev_rate_short')
        worker_stats['wsma_rate_long'] = \
            self._compute_wsma(worker_stats, shares, submit_secs,
                               config.SHARE_MANAGER_LONG_WSMA_PERIOD_S,
                               'wsma_shares_long', 'wsma_start_time_long',
                               'wsma_prev_rate_long')
        if shares > 0:
            worker_stats['valid_submits'] += 1
            worker_stats['valid_shares'] += shares
            worker_stats['last_valid_share'] = submit_secs
        # Let the reporter know that some worker statistics have changed and it
        # should be reported
        Interfaces.reporter.worker_stats_changed(worker_stats)

    def update_stats_by_no_submit(self, session):
        """Decay rate stats for workers that stopped submitting shares."""
        now = posix_time()
        for worker_stats in session['SM_worker_stats'].itervalues():
            # Only update the stats when there was already at least one valid share
            # or some other fact could change
            if worker_stats['last_valid_share'] or \
               worker_stats['invalid_submits'] > 0 or \
               worker_stats['stale_submits'] > 0:
                self._update_stats_by_submit(worker_stats, 0, now)

    @staticmethod
    def get_session_short_wsma(session):
        """Sum of short-period WSMA rates over all workers in the session."""
        result = 0
        for worker_stats in session['SM_worker_stats'].itervalues():
            result += worker_stats['wsma_rate_short']
        return result

    def push_shares(self):
        """Is called by PushWatchdog when we need to push buffered shares to db"""
        secs = posix_secs()
        self._db_share_backend.push_shares(secs)
        self._sink_share_backend.push_shares(secs)
        # Sometimes notification is faster than
        # database update (because of old pool?)
        self.load_block_info()

    @staticmethod
    def on_invalid_submit_share(session, worker_stats, shares, job_id):
        """Record (and optionally log) an invalid share submission."""
        if config.LOG_INVALID_SHARE_SUBMIT:
            log.info("%x INVALID %d %s %s" % (job_id, shares,
                                              session['client_ip'],
                                              worker_stats['worker_name']))
        worker_stats['invalid_submits'] += 1
        Interfaces.reporter.worker_stats_changed(worker_stats)

    def on_submit_share(self, session, worker_stats, shares, submit_secs):
        """Account a valid share submission and forward it to both backends."""
        #if config.LOG_VALID_SHARE_SUBMIT or not is_valid:
        #    log.info("%s %x %s %d %s %s" % (block_hash, job_id, 'valid' if is_valid else 'INVALID',
        #                                    shares, session['client_ip'], worker_stats['worker_name']))
        self._update_stats_by_submit(worker_stats, shares, submit_secs)
        self._db_share_backend.on_submit_share(session, worker_stats,
                                               shares, submit_secs)
        self._sink_share_backend.on_submit_share(session, worker_stats,
                                                 shares, submit_secs)

    @defer.inlineCallbacks
    def on_submit_block(self, is_accepted, worker_stats, block_header,
                        block_hash, submit_secs, value):
        """Record a block submission (accepted or rejected) in both backends
        and refresh the current round info."""
        log.info("BLOCK %s %s !!!" % (block_hash,
                                      'ACCEPTED' if is_accepted else 'REJECTED'))
        if not is_accepted:
            if not config.ACCEPT_INVALID_BLOCK__NONCE:
                return
            else:
                log.error("Rejected block saved to DB %s" % block_hash)
        try:
            info = (yield self._bitcoin_rpc.getinfo())
            block_height = info['blocks']
            # FIXME
            difficulty = math.floor(float(info['difficulty']))
        except:
            # Best effort: fall back to sentinel values when getinfo fails.
            log.error(
                "Exception during getting block info after block submission: %s"
                % traceback.format_exc())
            block_height = -1
            difficulty = -1
        self._db_share_backend.on_submit_block(block_hash, block_height,
                                               difficulty, submit_secs,
                                               value, worker_stats)
        self._sink_share_backend.on_submit_block(block_hash, block_height,
                                                 difficulty, submit_secs,
                                                 value, worker_stats)
        self.load_block_info()
def setup(on_startup):
    '''Setup mining service internal environment.

    You should not need to change this. If you want to use another
    Worker manager or Share manager, you should set proper reference to
    Interfaces class *before* you call setup() in the launcher script.
    '''
    import lib.settings as settings

    # Get logging online as soon as possible
    import lib.logger
    log = lib.logger.get_logger('mining')

    from interfaces import Interfaces
    from lib.block_updater import BlockUpdater
    from lib.template_registry import TemplateRegistry
    from lib.bitcoin_rpc import BitcoinRPC
    from lib.block_template import BlockTemplate
    from lib.coinbaser import SimpleCoinbaser

    bitcoin_rpc = BitcoinRPC(settings.DAEMON_TRUSTED_HOST,
                             settings.DAEMON_TRUSTED_PORT,
                             settings.DAEMON_TRUSTED_USER,
                             settings.DAEMON_TRUSTED_PASSWORD)

    # Poll until the daemon answers getblocktemplate with a dict.
    log.info("Connecting to RPC...")
    while True:
        try:
            log.info('Waiting for RPC...')
            result = (yield bitcoin_rpc.getblocktemplate())
            if isinstance(result, dict):
                break
        # FIX: was a bare "except:", which also swallowed GeneratorExit /
        # KeyboardInterrupt (breaking cancellation of this inlineCallbacks
        # generator) and hid every RPC failure silently.
        except Exception:
            log.error('RPC not ready yet, retrying in 1s', exc_info=True)
            # NOTE(review): time.sleep() blocks the reactor thread; assumed
            # acceptable during startup only. 'time' must be imported at
            # module level (original relied on the same).
            time.sleep(1)
    log.info('Connected to RPC - Ready to GO!')

    # Start the coinbaser
    coinbaser = SimpleCoinbaser(bitcoin_rpc, getattr(settings, 'CENTRAL_WALLET'))
    (yield coinbaser.on_load)

    registry = TemplateRegistry(BlockTemplate, coinbaser, bitcoin_rpc,
                                getattr(settings, 'INSTANCE_ID'),
                                MiningSubscription.on_template,
                                Interfaces.share_manager.on_network_block)

    # Template registry is the main interface between Stratum service
    # and pool core logic
    Interfaces.set_template_registry(registry)

    # Set up polling mechanism for detecting new block on the network
    # This is just failsafe solution when -blocknotify
    # mechanism is not working properly
    BlockUpdater(registry, bitcoin_rpc)

    # Background pruner for the worker job log; daemon so it never blocks
    # process shutdown.
    prune_thr = threading.Thread(target=WorkLogPruner,
                                 args=(Interfaces.worker_manager.job_log,))
    prune_thr.daemon = True
    prune_thr.start()

    log.info("MINING SERVICE IS READY")
    on_startup.callback(True)
class ShareManager(object):
    """Tracks per-worker share statistics and forwards shares/blocks to the
    DB and sink backends. Rates are kept as windowed simple moving averages
    (WSMA) over a short and a long period."""

    def __init__(self):
        # Fire deferred when manager is ready
        self.on_load = defer.Deferred()
        # Monotonic counter handed out to newly authorized worker sessions.
        self._last_worker_session_id = 0
        # Watchdog periodically invokes push_shares() to flush buffered shares.
        self._watchdog = PushWatchdog(self.push_shares)
        self._bitcoin_rpc = BitcoinRPC(
            config.BITCOIN_TRUSTED_HOST,
            config.BITCOIN_TRUSTED_PORT,
            config.BITCOIN_TRUSTED_USER,
            config.BITCOIN_TRUSTED_PASSWORD,
        )
        self._db_share_backend = DbShareBackend()
        self._sink_share_backend = SinkShareBackend()
        # Kicks off async load of current block info; fires on_load when done.
        self.load_block_info()

    def get_db_share_backend(self):
        """Return the database share backend instance."""
        return self._db_share_backend

    def on_network_block(self, prevhash):
        """Prints when there's new block coming from the network (possibly new round)"""
        # Refresh info about current round (another instance mined block?)
        self.load_block_info()

    @defer.inlineCallbacks
    def load_block_info(self, _=None):
        """Load the current round's block id/start time from the DB and pass
        it to the DB backend; fires on_load the first time it completes."""
        # Load current block_id from db
        (block_id, date_started) = (yield dbquery.get_block_info())
        block_started_secs = int(time.mktime(date_started.utctimetuple()))
        self._db_share_backend.set_block_info(block_id, block_started_secs)
        # We're ready!
        if not self.on_load.called:
            self.on_load.callback(True)

    @staticmethod
    def initialize_session(session):
        """Ensure the session carries a per-worker stats mapping."""
        if not session.get("SM_worker_stats"):
            session["SM_worker_stats"] = {}

    def initialize_worker_stats(self, session, worker_id, worker_name):
        """Create and register a fresh stats record for one worker in this
        session. Returns the stats dict."""
        now = posix_secs()
        self._last_worker_session_id += 1
        stats = {
            "session_id": session["session_id"],
            "worker_id": worker_id,
            "worker_name": worker_name,
            "worker_session_id": self._last_worker_session_id,
            "authorized_at": now,
            "stale_submits": 0,
            "old_submits": 0,
            "invalid_submits": 0,
            "valid_submits": 0,
            "valid_shares": 0,
            "last_valid_share": None,
            # WSMA state: previous window rate, current window start,
            # shares accumulated in the current window, derived rate.
            "wsma_prev_rate_short": None,
            "wsma_prev_rate_long": None,
            "wsma_start_time_short": now,
            "wsma_start_time_long": now,
            "wsma_shares_short": 0,
            "wsma_shares_long": 0,
            "wsma_rate_short": 0.0,
            "wsma_rate_long": 0.0,
        }
        session["SM_worker_stats"][worker_name] = stats
        return stats

    @staticmethod
    def get_worker_stats(session, worker_name):
        """Look up the stats record for one worker; raises KeyError if absent."""
        return session["SM_worker_stats"][worker_name]

    @staticmethod
    def _compute_wsma(worker_stats, shares, submit_time, time_period,
                      shares_key, start_time_key, prev_rate_key):
        """Update and return the windowed simple moving average (shares/sec)
        for one period, blending the previous window's rate with the shares
        accumulated in the currently growing window."""
        worker_stats[shares_key] += shares
        curr_window = submit_time - worker_stats[start_time_key] + 1
        prev_rate = worker_stats[prev_rate_key]
        # First period for the miner
        if not prev_rate:
            prev_rate = 0
            prev_window = 0
        # Any next period when some previous exists
        else:
            prev_window = time_period - curr_window
            if prev_window < 0:
                prev_window = 0
        # Calculate the new partly from the previous window and partly form the
        # currently growing one
        new_rate = float((prev_rate * prev_window) + worker_stats[shares_key]) / (prev_window + curr_window)
        # Did we finished a complete computation window?
        if curr_window > time_period:
            worker_stats[shares_key] = shares
            worker_stats[start_time_key] = submit_time
            worker_stats[prev_rate_key] = new_rate
        return new_rate

    def _update_stats_by_submit(self, worker_stats, shares, submit_secs):
        """
        When shares = 0 then we don't count it as a valid shares submission,
        only rate stats are updated.
        """
        worker_stats["wsma_rate_short"] = self._compute_wsma(
            worker_stats,
            shares,
            submit_secs,
            config.SHARE_MANAGER_SHORT_WSMA_PERIOD_S,
            "wsma_shares_short",
            "wsma_start_time_short",
            "wsma_prev_rate_short",
        )
        worker_stats["wsma_rate_long"] = self._compute_wsma(
            worker_stats,
            shares,
            submit_secs,
            config.SHARE_MANAGER_LONG_WSMA_PERIOD_S,
            "wsma_shares_long",
            "wsma_start_time_long",
            "wsma_prev_rate_long",
        )
        if shares > 0:
            worker_stats["valid_submits"] += 1
            worker_stats["valid_shares"] += shares
            worker_stats["last_valid_share"] = submit_secs
        # Let the reporter know that some worker statistics have changed and it
        # should be reported
        Interfaces.reporter.worker_stats_changed(worker_stats)

    def update_stats_by_no_submit(self, session):
        """Decay rate stats for workers that stopped submitting shares."""
        now = posix_time()
        for worker_stats in session["SM_worker_stats"].itervalues():
            # Only update the stats when there was already at least one valid share
            # or some other fact could change
            if (
                worker_stats["last_valid_share"]
                or worker_stats["invalid_submits"] > 0
                or worker_stats["stale_submits"] > 0
            ):
                self._update_stats_by_submit(worker_stats, 0, now)

    @staticmethod
    def get_session_short_wsma(session):
        """Sum of short-period WSMA rates over all workers in the session."""
        result = 0
        for worker_stats in session["SM_worker_stats"].itervalues():
            result += worker_stats["wsma_rate_short"]
        return result

    def push_shares(self):
        """Is called by PushWatchdog when we need to push buffered shares to db"""
        secs = posix_secs()
        self._db_share_backend.push_shares(secs)
        self._sink_share_backend.push_shares(secs)
        # Sometimes notification is faster than
        # database update (because of old pool?)
        self.load_block_info()

    @staticmethod
    def on_invalid_submit_share(session, worker_stats, shares, job_id):
        """Record (and optionally log) an invalid share submission."""
        if config.LOG_INVALID_SHARE_SUBMIT:
            log.info("%x INVALID %d %s %s" % (job_id, shares, session["client_ip"], worker_stats["worker_name"]))
        worker_stats["invalid_submits"] += 1
        Interfaces.reporter.worker_stats_changed(worker_stats)

    def on_submit_share(self, session, worker_stats, shares, submit_secs):
        """Account a valid share submission and forward it to both backends."""
        # if config.LOG_VALID_SHARE_SUBMIT or not is_valid:
        #     log.info("%s %x %s %d %s %s" % (block_hash, job_id, 'valid' if is_valid else 'INVALID',
        #                                     shares, session['client_ip'], worker_stats['worker_name']))
        self._update_stats_by_submit(worker_stats, shares, submit_secs)
        self._db_share_backend.on_submit_share(session, worker_stats, shares, submit_secs)
        self._sink_share_backend.on_submit_share(session, worker_stats, shares, submit_secs)

    @defer.inlineCallbacks
    def on_submit_block(self, is_accepted, worker_stats, block_header, block_hash, submit_secs, value):
        """Record a block submission (accepted or rejected) in both backends
        and refresh the current round info."""
        log.info("BLOCK %s %s !!!" % (block_hash, "ACCEPTED" if is_accepted else "REJECTED"))
        if not is_accepted:
            if not config.ACCEPT_INVALID_BLOCK__NONCE:
                return
            else:
                log.error("Rejected block saved to DB %s" % block_hash)
        try:
            info = (yield self._bitcoin_rpc.getinfo())
            block_height = info["blocks"]
            # FIXME
            difficulty = math.floor(float(info["difficulty"]))
        except:
            # Best effort: fall back to sentinel values when getinfo fails.
            log.error("Exception during getting block info after block submission: %s" % traceback.format_exc())
            block_height = -1
            difficulty = -1
        self._db_share_backend.on_submit_block(block_hash, block_height, difficulty, submit_secs, value, worker_stats)
        self._sink_share_backend.on_submit_block(block_hash, block_height, difficulty, submit_secs, value, worker_stats)
        self.load_block_info()