Esempio n. 1
0
def cache_profitability():
    """
    Calculates the profitability from recent blocks

    Aggregates per-chain payout data from blocks found in the last 96 hours
    and caches a profitability figure (BTC per hash, scaled to per-day) for
    each chain under the key ``chain_<id>_profitability``.
    """
    # track chain profits: chain id -> {currency object -> [entry dicts]}
    chain_profit = {}

    # Only look at blocks found within the last 4 days
    start_time = datetime.datetime.utcnow() - datetime.timedelta(hours=96)

    # Only currencies that are both mineable and sellable contribute profit
    query_currencies = [c.key for c in currencies.itervalues() if c.mineable and c.sellable]
    blocks = (Block.query.filter(Block.found_at > start_time).
              filter(Block.currency.in_(query_currencies)))

    for block in blocks:
        # chain_data maps chain id -> profitability info dict for this block
        chain_data = block.chain_profitability()
        current_app.logger.info("Get {} from {}".format(chain_data, block))

        for chainid, data in chain_data.iteritems():

            # Ignore data for chains not present in our configuration
            if chainid not in chains:
                current_app.logger.warn(
                    "Chain #{} not configured properly! Skipping it..."
                    .format(chainid))
                continue

            # Set the block for convenience later
            data['block'] = block
            chain_profit.setdefault(chainid, {})
            chain_profit[chainid].setdefault(block.currency_obj, []).append(data)

    for chainid, chain_currencies in chain_profit.iteritems():
        # Accumulate sold shares separately for merge-mined vs primary
        # currencies; merged shares get counted once per merged currency
        merged_shares = 0
        main_shares = 0
        merged_currencies = 0
        btc_total = 0
        for currency, entries in chain_currencies.iteritems():
            if currency.merged:
                merged_currencies += 1
            for data in entries:
                btc_total += data['btc_total']
                if currency.merged:
                    merged_shares += data['sold_shares']
                else:
                    main_shares += data['sold_shares']

        # Convert shares to raw hashes so profitability is per-hash
        hps = chains[chainid].algo.hashes_per_share
        if main_shares != 0:
            btc_per = btc_total / (main_shares * hps)
        elif merged_shares != 0:
            # Divide back out the per-merged-currency multiple counting
            btc_per = btc_total / (merged_shares * hps / merged_currencies)
        else:
            btc_per = 0
        btc_per *= 86400  # per day

        current_app.logger.debug("Caching chain #{} with profit {}"
                                 .format(chainid, btc_per))

        # Cache result for 2 hours
        cache.set('chain_{}_profitability'.format(chainid),
                  btc_per, timeout=3600 * 2)
Esempio n. 2
0
def cache_profitability():
    """
    Calculates the profitability from recent blocks
    """
    # Only look at blocks found in the last 96 hours
    window_start = datetime.datetime.utcnow() - datetime.timedelta(hours=96)

    # Only currencies we can both mine and sell contribute to profitability
    sellable_keys = [cur.key for cur in currencies.itervalues()
                     if cur.mineable and cur.sellable]
    recent_blocks = (Block.query
                     .filter(Block.found_at > window_start)
                     .filter(Block.currency.in_(sellable_keys))
                     .all())

    # chain id -> {currency object -> [profitability entry dicts]}
    per_chain = {}
    for blk in recent_blocks:
        profitability = blk.chain_profitability()
        current_app.logger.info("Get {} from {}".format(profitability, blk))

        for chain_id, entry in profitability.iteritems():

            # Skip chains not present in our configuration
            if chain_id not in chains:
                current_app.logger.warn(
                    "Chain #{} not configured properly! Skipping it..."
                    .format(chain_id))
                continue

            # Keep a handle on the originating block for later use
            entry['block'] = blk
            by_currency = per_chain.setdefault(chain_id, {})
            by_currency.setdefault(blk.currency_obj, []).append(entry)

    for chain_id, by_currency in per_chain.iteritems():
        # Tally sold shares separately for merged vs primary currencies
        shares_main = 0
        shares_merged = 0
        merged_count = 0
        total_btc = 0
        for cur, entries in by_currency.iteritems():
            if cur.merged:
                merged_count += 1
            for entry in entries:
                total_btc += entry['btc_total']
                if cur.merged:
                    shares_merged += entry['sold_shares']
                else:
                    shares_main += entry['sold_shares']

        # Translate shares into raw hashes for a per-hash figure
        hashes_per_share = chains[chain_id].algo.hashes_per_share
        if shares_main != 0:
            profit = total_btc / (shares_main * hashes_per_share)
        elif shares_merged != 0:
            # Merged shares were counted once per merged currency
            profit = total_btc / (shares_merged * hashes_per_share / merged_count)
        else:
            profit = 0
        profit *= 86400  # per day

        current_app.logger.debug("Caching chain #{} with profit {}"
                                 .format(chain_id, profit))

        cache.set('chain_{}_profitability'.format(chain_id),
                  profit, timeout=3600 * 8)
Esempio n. 3
0
def convert_unexchangeable(dont_simulate):
    """ Converts Credit exchanges for unexchangeable currencies to payout the
    pool.

    XXX: Now broken due to config refactor """
    # Collect (currency key, pool payout config) pairs for every configured
    # currency that cannot be exchanged
    unexchangeable = []
    for cur in currencies.itervalues():
        if not cur.coinserv:
            continue  # skip unused currencies
        if not cur.exchangeable:
            unexchangeable.append((cur.key, cur.pool_payout))

    current_app.logger.info("Looking for CreditExchange's for currencies {}"
                            .format(unexchangeable))

    for key, pool_payout in unexchangeable:
        # block -> [total amount, list of removed CreditExchange objects]
        per_block = {}
        seen_hashes = set()
        exchanges = (CreditExchange.query
                     .join(CreditExchange.block, aliased=True)
                     .filter_by(currency=key))
        for exch in exchanges:
            per_block.setdefault(exch.block, [0, []])
            seen_hashes.add(exch.block.hash)
            record = per_block[exch.block]
            record[0] += exch.amount
            record[1].append(exch)
            db.session.delete(exch)

        # Sanity check: block objects used as dict keys must be distinct
        assert len(seen_hashes) == len(per_block)

        for blk, (total, removed) in per_block.iteritems():
            # Replace the deleted CreditExchanges with a single pool credit;
            # it is always a plain Credit since the currency is unexchangeable
            credit = Credit(
                source=0,
                address=pool_payout['address'],
                user=pool_payout['user'],
                currency=pool_payout['currency'].key,
                amount=total,
                block_id=blk.id,
                payable=blk.mature)
            db.session.add(credit)

            current_app.logger.info(
                "Block {} status {} value {} removed {} CreditExchanges of {} total amount"
                .format(blk, blk.status, blk.total_value, len(removed), total))

        current_app.logger.info("For currency {}, updated {} blocks"
                                .format(key, len(per_block)))

    if dont_simulate is True:
        current_app.logger.info("Committing transaction!")
        db.session.commit()
    else:
        current_app.logger.info("Rolling back!")
        db.session.rollback()
Esempio n. 4
0
def convert_unexchangeable(dont_simulate):
    """ Converts Credit exchanges for unexchangeable currencies to payout the
    pool.

    Deletes every CreditExchange row tied to a non-exchangeable currency and
    replaces each block's set of them with a single pool Credit of the
    combined amount. Commits only when dont_simulate is True; otherwise the
    whole transaction is rolled back (dry run).

    XXX: Now broken due to config refactor """
    # (currency key, pool payout config) for configured, non-exchangeable
    # currencies only
    unexchangeable = []
    for currency in currencies.itervalues():
        # Skip unused currencies
        if not currency.coinserv:
            continue

        if not currency.exchangeable:
            unexchangeable.append((currency.key, currency.pool_payout))

    current_app.logger.info("Looking for CreditExchange's for currencies {}"
                            .format(unexchangeable))

    for key, pool_payout in unexchangeable:
        # block -> [accumulated amount, list of deleted CreditExchanges]
        blocks = {}
        hashes = set()
        for ce in (CreditExchange.query.join(CreditExchange.block, aliased=True).
                   filter_by(currency=key)):
            blocks.setdefault(ce.block, [0, []])
            hashes.add(ce.block.hash)
            blocks[ce.block][0] += ce.amount
            blocks[ce.block][1].append(ce)
            db.session.delete(ce)

        # Sanity check, make sure block objs as keys is valid
        assert len(hashes) == len(blocks)

        for block, (amount, credits) in blocks.iteritems():
            # Create a new credit for the pool to displace the deleted
            # CreditExchanges. It will always be a credit since the currency is
            # unexchangeable
            pool_block = Credit(
                source=0,
                address=pool_payout['address'],
                user=pool_payout['user'],
                currency=pool_payout['currency'].key,
                amount=amount,
                block_id=block.id,
                payable=block.mature)
            db.session.add(pool_block)

            current_app.logger.info(
                "Block {} status {} value {} removed {} CreditExchanges of {} total amount"
                .format(block, block.status, block.total_value, len(credits), amount))

        current_app.logger.info("For currency {}, updated {} blocks"
                                .format(key, len(blocks)))

    if dont_simulate is True:
        current_app.logger.info("Committing transaction!")
        db.session.commit()
    else:
        current_app.logger.info("Rolling back!")
        db.session.rollback()
Esempio n. 5
0
def update_network():
    """
    Queries the RPC servers confirmed to update network stats information.

    For every mineable currency, fetches a block template from its coin
    server, pushes the current difficulty onto a Redis list (capped to about
    six hours of blocks), and caches height/difficulty/reward plus a rolling
    difficulty average under ``<key>_data``.
    """
    for currency in currencies.itervalues():
        if not currency.mineable:
            continue

        try:
            gbt = currency.coinserv.getblocktemplate({})
        except (urllib3.exceptions.HTTPError, CoinRPCException) as e:
            # Coin server down or unreachable — skip it this round
            current_app.logger.error("Unable to communicate with {} RPC server: {}"
                                     .format(currency, e))
            continue

        key = "{}_data".format(currency.key)
        block_cache_key = "{}_block_cache".format(currency.key)

        current_data = cache.get(key)
        if current_data and current_data['height'] == gbt['height']:
            # Already have information for this block
            # NOTE(review): despite the "Not updating" message below, control
            # falls through and the difficulty list and cache ARE still
            # rewritten for this height — confirm whether a `continue` was
            # intended here.
            current_app.logger.debug(
                "Not updating {} net info, height {} already recorded."
                .format(currency, current_data['height']))
        else:
            current_app.logger.info(
                "Updating {} net info for height {}.".format(currency, gbt['height']))

        # Six hours worth of blocks. how many we'll keep in the cache
        keep_count = 21600 / currency.block_time

        difficulty = bits_to_difficulty(gbt['bits'])
        # Push the newest difficulty and trim the list to the retention window
        cache.cache._client.lpush(block_cache_key, difficulty)
        cache.cache._client.ltrim(block_cache_key, 0, keep_count)
        diff_list = cache.cache._client.lrange(block_cache_key, 0, -1)
        difficulty_avg = sum(map(float, diff_list)) / len(diff_list)

        # difficulty_avg_stale flags an average computed over fewer blocks
        # than the full retention window
        cache.set(key,
                  dict(height=gbt['height'],
                       difficulty=difficulty,
                       reward=gbt['coinbasevalue'] * current_app.SATOSHI,
                       difficulty_avg=difficulty_avg,
                       difficulty_avg_stale=len(diff_list) < keep_count),
                  timeout=1200)
Esempio n. 6
0
def update_network():
    """
    Queries the RPC servers confirmed to update network stats information.
    """
    for cur in currencies.itervalues():
        # Only mineable currencies have network stats worth tracking
        if not cur.mineable:
            continue

        try:
            gbt = cur.coinserv.getblocktemplate({})
        except (urllib3.exceptions.HTTPError, CoinRPCException) as e:
            current_app.logger.error("Unable to communicate with {} RPC server: {}"
                                     .format(cur, e))
            continue

        data_key = "{}_data".format(cur.key)
        diff_list_key = "{}_block_cache".format(cur.key)

        cached = cache.get(data_key)
        if cached and cached['height'] == gbt['height']:
            # This height has been seen before
            current_app.logger.debug(
                "Not updating {} net info, height {} already recorded."
                .format(cur, cached['height']))
        else:
            current_app.logger.info(
                "Updating {} net info for height {}.".format(cur, gbt['height']))

        # Six hours worth of blocks. how many we'll keep in the cache
        keep_count = 21600 / cur.block_time

        diff = bits_to_difficulty(gbt['bits'])
        redis = cache.cache._client
        redis.lpush(diff_list_key, diff)
        redis.ltrim(diff_list_key, 0, keep_count)
        recent = redis.lrange(diff_list_key, 0, -1)
        avg_diff = sum(map(float, recent)) / len(recent)

        cache.set(data_key,
                  dict(height=gbt['height'],
                       difficulty=diff,
                       reward=gbt['coinbasevalue'] * current_app.SATOSHI,
                       difficulty_avg=avg_diff,
                       difficulty_avg_stale=len(recent) < keep_count),
                  timeout=1200)
Esempio n. 7
0
def forward_coinservs(host):
    """ Given a hostname, connects to a remote and tunnels all coinserver ports
    to local ports. Useful for development testing. """
    ssh_args = [host, "-N"]

    # Tunnel every configured coin server's RPC port
    for cur in currencies.itervalues():
        if cur.coinserv:
            ssh_args.append("-L {0}:127.0.0.1:{0}"
                            .format(cur.coinserv.config['port']))

    # Tunnel powerpool monitor ports, but only those bound to loopback
    for pool in powerpools.itervalues():
        parsed = urlparse(pool.monitor_address)
        if parsed.hostname in ['localhost', '127.0.0.1']:
            ssh_args.append("-L {0}:127.0.0.1:{0}".format(parsed.port))

    current_app.logger.info(("/usr/bin/ssh", "/usr/bin/ssh", ssh_args))
    # Replace the current process with ssh
    os.execl("/usr/bin/ssh", "/usr/bin/ssh", *ssh_args)
Esempio n. 8
0
def forward_coinservs(host):
    """ Given a hostname, connects to a remote and tunnels all coinserver ports
    to local ports. Useful for development testing.

    Builds an ssh argument list (-N, no remote command) with one -L forward
    per configured coin server port and per loopback-bound powerpool monitor
    port, then replaces the current process with ssh via os.execl. """
    args = [host, "-N"]
    # Forward each configured coin server's RPC port 1:1
    for currency in currencies.itervalues():
        if not currency.coinserv:
            continue
        args.append("-L {0}:127.0.0.1:{0}"
                    .format(currency.coinserv.config['port']))

    # Forward powerpool monitor ports, but only those bound to loopback
    for pp in powerpools.itervalues():
        parts = urlparse(pp.monitor_address)
        if parts.hostname not in ['localhost', '127.0.0.1']:
            continue

        args.append("-L {0}:127.0.0.1:{0}".format(parts.port))

    current_app.logger.info(("/usr/bin/ssh", "/usr/bin/ssh", args))
    # execl does not return — the process becomes ssh
    os.execl("/usr/bin/ssh", "/usr/bin/ssh", *args)
Esempio n. 9
0
def server_status():
    """
    Periodically poll the backend to get number of workers and other general
    status information.

    Caches per-server status dicts, raw monitor payloads, per-algo miner
    counts, and a per-currency hashrate (0 for currencies with no located
    hashrate).
    """
    past_chain_profit = get_past_chain_profit()
    currency_hashrates = {}
    algo_miners = {}
    servers = {}
    raw_servers = {}
    for powerpool in powerpools.itervalues():

        # Defaults used for any fields the monitor payload doesn't supply
        server_default = dict(workers=0,
                              miners=0,
                              hashrate=0,
                              name='???',
                              profit_4d=0,
                              currently_mining='???')

        try:
            data = powerpool.request('')
        except Exception:
            current_app.logger.warn("Couldn't connect to internal monitor {}"
                                    .format(powerpool.full_info()))
            continue
        else:
            raw_servers[powerpool.stratum_address] = data
            status = {'workers': data['client_count_authed'],
                      'miners': data['address_count'],
                      'hashrate': data['hps'],
                      'name': powerpool.stratum_address,
                      'profit_4d': past_chain_profit[powerpool.chain.id]}

            server_default.update(status)
            servers[powerpool.key] = server_default

            algo_miners.setdefault(powerpool.chain.algo.key, 0)
            algo_miners[powerpool.chain.algo.key] += data['address_count']

            # BUG FIX: 'last_flush_job' can be present but None (no job
            # flushed yet); guard its truthiness before subscripting to
            # avoid a TypeError.
            if 'last_flush_job' in data and data['last_flush_job'] \
                    and 'currency' in data['last_flush_job']:
                curr = data['last_flush_job']['currency']
                servers[powerpool.key].update({'currently_mining': curr})
                currency_hashrates.setdefault(currencies[curr], 0)
                currency_hashrates[currencies[curr]] += data['hps']
                # Add hashrate to the merged networks too
                if 'merged_networks' in data['last_flush_job']:
                    for currency in data['last_flush_job']['merged_networks']:
                        currency_hashrates.setdefault(currencies[currency], 0)
                        currency_hashrates[currencies[currency]] += data['hps']

    # Set hashrate to 0 if not located
    for currency in currencies.itervalues():
        hashrate = 0
        if currency in currency_hashrates:
            hashrate = currency_hashrates[currency]

        cache.set('hashrate_' + currency.key, hashrate, timeout=120)

    cache.set('raw_server_status', raw_servers, timeout=1200)
    cache.set('server_status', servers, timeout=1200)
    cache.set('total_miners', algo_miners, timeout=1200)
Esempio n. 10
0
def server_status():
    """
    Periodically poll the backend to get number of workers and other general
    status information.
    """
    chain_profits = get_past_chain_profit()
    hashrate_by_currency = {}
    miners_by_algo = {}
    server_info = {}
    raw_info = {}

    for pool in powerpools.itervalues():
        try:
            data = pool.request('')
        except Exception:
            current_app.logger.warn("Couldn't connect to internal monitor {}"
                                    .format(pool.full_info()))
            continue

        raw_info[pool.stratum_address] = data

        # Start from defaults, then overlay live values from the monitor
        entry = dict(workers=0,
                     miners=0,
                     hashrate=0,
                     name='???',
                     profit_4d=0,
                     currently_mining='???')
        entry.update({'workers': data['client_count_authed'],
                      'miners': data['address_count'],
                      'hashrate': data['hps'],
                      'name': pool.stratum_address,
                      'profit_4d': chain_profits[pool.chain.id]})
        server_info[pool.key] = entry

        algo_key = pool.chain.algo.key
        miners_by_algo[algo_key] = (miners_by_algo.get(algo_key, 0)
                                    + data['address_count'])

        flush_job = data.get('last_flush_job')
        if flush_job and 'currency' in flush_job:
            mined = flush_job['currency']
            entry.update({'currently_mining': mined})
            mined_obj = currencies[mined]
            hashrate_by_currency[mined_obj] = (
                hashrate_by_currency.get(mined_obj, 0) + data['hps'])
            # Credit the merge-mined networks with the same hashrate
            if 'merged_networks' in flush_job:
                for merged_key in flush_job['merged_networks']:
                    merged_obj = currencies[merged_key]
                    hashrate_by_currency[merged_obj] = (
                        hashrate_by_currency.get(merged_obj, 0) + data['hps'])

    # Currencies with no located hashrate get an explicit zero
    for cur in currencies.itervalues():
        cache.set('hashrate_' + cur.key,
                  hashrate_by_currency.get(cur, 0), timeout=120)

    cache.set('raw_server_status', raw_info, timeout=1200)
    cache.set('server_status', server_info, timeout=1200)
    cache.set('total_miners', miners_by_algo, timeout=1200)