def server_status():
    """Periodically poll every backend monitor and cache totals.

    Caches the raw monitor payloads, a per-server summary (workers,
    miners, hashrate, name) and a per-algorithm miner count.
    """
    miners_by_algo = {}
    status_by_server = {}
    raw_status = {}

    for pp in powerpools.itervalues():
        try:
            info = pp.request('')
        except Exception:
            # Best-effort: an unreachable monitor is logged and skipped
            current_app.logger.warn("Couldn't connect to internal monitor {}"
                                    .format(pp.full_info()))
            continue

        raw_status[pp.stratum_address] = info
        status_by_server[pp] = dict(workers=info['client_count_authed'],
                                    miners=info['address_count'],
                                    hashrate=info['hps'],
                                    name=pp.stratum_address)
        algo = pp.chain.algo.key
        miners_by_algo[algo] = (miners_by_algo.get(algo, 0)
                                + info['address_count'])

    cache.set('raw_server_status', raw_status, timeout=1200)
    cache.set('server_status', status_by_server, timeout=1200)
    cache.set('total_miners', miners_by_algo, timeout=1200)
def forward_coinservs(host):
    """Tunnel all local coinserver/monitor ports to `host` over ssh.

    Development helper: collects every coinserver port and every
    locally-bound powerpool monitor port, then replaces the current
    process with an ssh client forwarding each port 1:1.
    """
    ssh_args = [host, "-N"]

    # One -L forward per configured coinserver port
    for curr in currencies.itervalues():
        if curr.coinserv:
            ssh_args.append("-L {0}:127.0.0.1:{0}"
                            .format(curr.coinserv.config['port']))

    # One -L forward per powerpool monitor that is bound locally
    for pool in powerpools.itervalues():
        monitor = urlparse(pool.monitor_address)
        if monitor.hostname in ['localhost', '127.0.0.1']:
            ssh_args.append("-L {0}:127.0.0.1:{0}".format(monitor.port))

    current_app.logger.info(("/usr/bin/ssh", "/usr/bin/ssh", ssh_args))
    # execl never returns: the interpreter is replaced by ssh
    os.execl("/usr/bin/ssh", "/usr/bin/ssh", *ssh_args)
def server_status():
    """Periodically poll the backend to get number of workers and other
    general status information.

    Caches raw monitor payloads, per-server summaries (workers, miners,
    hashrate, 4-day chain profit, currently mined currency), per-algo
    miner counts, and a per-currency hashrate (0 for currencies no
    reachable server reports mining).
    """
    past_chain_profit = get_past_chain_profit()
    currency_hashrates = {}
    algo_miners = {}
    servers = {}
    raw_servers = {}
    for powerpool in powerpools.itervalues():
        # Placeholder values so a partially-populated entry is still sane
        server_default = dict(workers=0, miners=0, hashrate=0, name='???',
                              profit_4d=0, currently_mining='???')
        try:
            data = powerpool.request('')
        except Exception:
            current_app.logger.warn("Couldn't connect to internal monitor {}"
                                    .format(powerpool.full_info()))
            continue
        else:
            raw_servers[powerpool.stratum_address] = data
            status = {'workers': data['client_count_authed'],
                      'miners': data['address_count'],
                      'hashrate': data['hps'],
                      'name': powerpool.stratum_address,
                      'profit_4d': past_chain_profit[powerpool.chain.id]}
            server_default.update(status)
            servers[powerpool.key] = server_default
            algo_miners.setdefault(powerpool.chain.algo.key, 0)
            algo_miners[powerpool.chain.algo.key] += data['address_count']

            # BUG FIX: `last_flush_job` can be present but None, in which
            # case the old `'currency' in data['last_flush_job']` raised
            # TypeError. Guard on truthiness before membership testing.
            if 'last_flush_job' in data and data['last_flush_job'] \
                    and 'currency' in data['last_flush_job']:
                curr = data['last_flush_job']['currency']
                servers[powerpool.key].update({'currently_mining': curr})
                currency_hashrates.setdefault(currencies[curr], 0)
                currency_hashrates[currencies[curr]] += data['hps']

                # Credit the same hashrate to the merge-mined networks too
                if 'merged_networks' in data['last_flush_job']:
                    for currency in data['last_flush_job']['merged_networks']:
                        currency_hashrates.setdefault(currencies[currency], 0)
                        currency_hashrates[currencies[currency]] += data['hps']

    # Publish a hashrate for every known currency; 0 if not being mined
    for currency in currencies.itervalues():
        hashrate = currency_hashrates.get(currency, 0)
        cache.set('hashrate_' + currency.key, hashrate, timeout=120)

    cache.set('raw_server_status', raw_servers, timeout=1200)
    cache.set('server_status', servers, timeout=1200)
    cache.set('total_miners', algo_miners, timeout=1200)
def server_status():
    """Poll each backend monitor and cache aggregate pool status.

    Caches raw monitor payloads, per-server summaries (workers, miners,
    hashrate, 4-day chain profit, currently mined currency), per-algo
    miner counts, and a per-currency hashrate (0 for currencies no
    reachable server reports mining).
    """
    past_chain_profit = get_past_chain_profit()
    currency_hashrates = {}
    miners_per_algo = {}
    server_info = {}
    raw_info = {}

    for pp in powerpools.itervalues():
        # Placeholder values so a partially-populated entry is still sane
        entry = {'workers': 0, 'miners': 0, 'hashrate': 0, 'name': '???',
                 'profit_4d': 0, 'currently_mining': '???'}
        try:
            data = pp.request('')
        except Exception:
            current_app.logger.warn("Couldn't connect to internal monitor {}"
                                    .format(pp.full_info()))
            continue

        raw_info[pp.stratum_address] = data
        entry.update(workers=data['client_count_authed'],
                     miners=data['address_count'],
                     hashrate=data['hps'],
                     name=pp.stratum_address,
                     profit_4d=past_chain_profit[pp.chain.id])
        server_info[pp.key] = entry

        algo_key = pp.chain.algo.key
        miners_per_algo[algo_key] = (miners_per_algo.get(algo_key, 0)
                                     + data['address_count'])

        # `last_flush_job` may be absent or None; normalize to a dict
        job = data.get('last_flush_job') or {}
        if 'currency' in job:
            mined = job['currency']
            entry['currently_mining'] = mined
            key = currencies[mined]
            currency_hashrates[key] = (currency_hashrates.get(key, 0)
                                       + data['hps'])
            # Credit the same hashrate to the merge-mined networks too
            for merged in job.get('merged_networks', ()):
                mkey = currencies[merged]
                currency_hashrates[mkey] = (currency_hashrates.get(mkey, 0)
                                            + data['hps'])

    # Publish a hashrate for every known currency; 0 if not being mined
    for currency in currencies.itervalues():
        cache.set('hashrate_' + currency.key,
                  currency_hashrates.get(currency, 0), timeout=120)

    cache.set('raw_server_status', raw_info, timeout=1200)
    cache.set('server_status', server_info, timeout=1200)
    cache.set('total_miners', miners_per_algo, timeout=1200)