    def set_data(gbt, curr=None):
        prefix = ""
        if curr:
            prefix = curr + "_"
        prev_height = cache.get(prefix + 'blockheight') or 0

        if gbt['height'] == prev_height:
            logger.debug(
                "Not updating {} net info, height {} already recorded.".format(
                    curr or 'main', prev_height))
            return
        logger.info("Updating {} net info for height {}.".format(
            curr or 'main', gbt['height']))

        # set general information for this network
        difficulty = bits_to_difficulty(gbt['bits'])
        cache.set(prefix + 'blockheight', gbt['height'], timeout=1200)
        cache.set(prefix + 'difficulty', difficulty, timeout=1200)
        cache.set(prefix + 'reward', gbt['coinbasevalue'], timeout=1200)

        # keep a configured number of blocks in the cache for getting average difficulty
        cache.cache._client.lpush(prefix + 'block_cache', gbt['bits'])
        cache.cache._client.ltrim(prefix + 'block_cache', 0,
                                  current_app.config['difficulty_avg_period'])
        diff_list = cache.cache._client.lrange(
            prefix + 'block_cache', 0,
            current_app.config['difficulty_avg_period'])
        total_diffs = sum([bits_to_difficulty(diff) for diff in diff_list])
        cache.set(prefix + 'difficulty_avg',
                  total_diffs / len(diff_list),
                  timeout=120 * 60)

        # add the difficulty as a one minute share, unless we're staging
        if not current_app.config.get('stage', False):
            now = datetime.datetime.utcnow()
            try:
                m = OneMinuteType(typ=prefix + 'netdiff',
                                  value=difficulty * 1000,
                                  time=now)
                db.session.add(m)
                db.session.commit()
            except sqlalchemy.exc.IntegrityError:
                db.session.rollback()
                slc = OneMinuteType.query.with_lockmode('update').filter_by(
                    time=now, typ=prefix + 'netdiff').one()
                # just average the diff of two blocks that occurred in the same second..
                slc.value = ((difficulty * 1000) + slc.value) / 2
                db.session.commit()
Example #2
    def update_profitability(self, currency):
        """ Recalculates the profitability for a specific currency """
        jobmanager = self.jobmanagers[currency]
        last_job = jobmanager.latest_job
        pscore, ratio, _ = self.price_data[currency]
        # We can't update without both a job and profit data
        if last_job is None or pscore is None:
            return False

        max_blockheight = jobmanager.config['max_blockheight']
        if max_blockheight is not None and last_job.block_height >= max_blockheight:
            self.profit_data[currency] = 0
            self.logger.debug(
                "{} height {} is >= the configured maximum blockheight of {}, "
                "setting profitability to 0."
                .format(currency, last_job.block_height, max_blockheight))
            return True

        block_value = last_job.total_value / 100000000.0
        diff = bits_to_difficulty(hexlify(last_job.bits))

        self.profit_data[currency] = (block_value * float(pscore) / diff) * ratio * 1000000
        self.logger.debug(
            "Updating {} profit data;\n\tblock_value {};\n\tavg_price {:,.8f}"
            ";\n\tdiff {};\n\tratio {};\n\tresult {}"
            .format(currency, block_value, float(pscore), diff,
                    ratio, self.profit_data[currency]))
        self.manager.log_event("{name}.profitability.{curr}:{metric}|g"
                               .format(name=self.manager.config['procname'],
                                       curr=currency,
                                       metric=self.profit_data[currency]))
        return True
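To make the formula above concrete, here is a hedged numeric walk-through with invented inputs; the units of pscore and ratio depend on the pool's price feed, so treat every value as hypothetical.

# Hypothetical inputs mirroring update_profitability's variables
block_value = 25.0      # coins per block (last_job.total_value / 1e8)
pscore = 0.0021         # assumed average price score from price_data
diff = 1094.0           # network difficulty from bits_to_difficulty
ratio = 0.98            # assumed exchange-depth ratio

# Same arithmetic as the method: coins per unit difficulty, price-weighted,
# then scaled by 1e6 so the gauge metric stays readable
profit = (block_value * float(pscore) / diff) * ratio * 1000000
print(profit)  # ~47.03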
Example #3
    def update_profitability(self, currency):
        """ Recalculates the profitability for a specific currency """
        jobmanager = self.jobmanagers[currency]
        last_job = jobmanager.latest_job
        pscore, ratio, _ = self.price_data[currency]
        # We can't update without both a job and profit data
        if last_job is None or pscore is None:
            return False

        max_blockheight = jobmanager.config['max_blockheight']
        if max_blockheight is not None and last_job.block_height >= max_blockheight:
            self.profit_data[currency] = 0
            self.logger.debug(
                "{} height {} is >= the configured maximum blockheight of {}, "
                "setting profitability to 0.".format(currency,
                                                     last_job.block_height,
                                                     max_blockheight))
            return True

        block_value = last_job.total_value / 100000000.0
        diff = bits_to_difficulty(hexlify(last_job.bits))

        self.profit_data[currency] = (block_value * float(pscore) /
                                      diff) * ratio * 1000000
        self.logger.debug(
            "Updating {} profit data;\n\tblock_value {};\n\tavg_price {:,.8f}"
            ";\n\tdiff {};\n\tratio {};\n\tresult {}".format(
                currency, block_value, float(pscore), diff, ratio,
                self.profit_data[currency]))
        self.manager.log_event("{name}.profitability.{curr}:{metric}|g".format(
            name=self.manager.config['procname'],
            curr=currency,
            metric=self.profit_data[currency]))
        return True
Example #4
File: tasks.py Project: fscook/simplevert
def difficulty_avg(self):
    """
    Cache the average network difficulty for the last 500 blocks
    """
    try:
        diff_list = cache.cache._client.lrange('block_cache', 0, 500)
        total_diffs = sum([bits_to_difficulty(diff) for diff in diff_list])
        cache.set('difficulty_avg', total_diffs / len(diff_list),
                  timeout=120 * 60)
    except Exception as exc:
        logger.warn("Unknown failure in difficulty_avg", exc_info=True)
        raise self.retry(exc=exc)
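For context, the bare self argument and the self.retry(exc=exc) call imply this function is registered as a bound Celery task. A minimal sketch of that wiring, with the app object name and retry delay assumed rather than taken from the project:

from celery import Celery

celery = Celery('simplecoin_tasks')  # hypothetical app object

@celery.task(bind=True, default_retry_delay=60)  # assumed 60s retry delay
def difficulty_avg(self):
    # body as in the example above; with bind=True Celery passes the task
    # instance as `self`, which is what makes self.retry(exc=exc) work
    pass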
Example #5
    def set_data(gbt, curr=None):
        prefix = ""
        if curr:
            prefix = curr + "_"
        prev_height = cache.get(prefix + 'blockheight') or 0

        if gbt['height'] == prev_height:
            logger.debug("Not updating {} net info, height {} already recorded."
                         .format(curr or 'main', prev_height))
            return
        logger.info("Updating {} net info for height {}.".format(curr or 'main', gbt['height']))

        # set general information for this network
        difficulty = bits_to_difficulty(gbt['bits'])
        cache.set(prefix + 'blockheight', gbt['height'], timeout=1200)
        cache.set(prefix + 'difficulty', difficulty, timeout=1200)
        cache.set(prefix + 'reward', gbt['coinbasevalue'], timeout=1200)

        # keep a configured number of blocks in the cache for getting average difficulty
        cache.cache._client.lpush(prefix + 'block_cache', gbt['bits'])
        cache.cache._client.ltrim(prefix + 'block_cache', 0, current_app.config['difficulty_avg_period'])
        diff_list = cache.cache._client.lrange(prefix + 'block_cache', 0, current_app.config['difficulty_avg_period'])
        total_diffs = sum([bits_to_difficulty(diff) for diff in diff_list])
        cache.set(prefix + 'difficulty_avg', total_diffs / len(diff_list), timeout=120 * 60)

        # add the difficulty as a one minute share, unless we're staging
        if not current_app.config.get('stage', False):
            now = datetime.datetime.utcnow()
            try:
                m = OneMinuteType(typ=prefix + 'netdiff', value=difficulty * 1000, time=now)
                db.session.add(m)
                db.session.commit()
            except sqlalchemy.exc.IntegrityError:
                db.session.rollback()
                slc = OneMinuteType.query.with_lockmode('update').filter_by(
                    time=now, typ=prefix + 'netdiff').one()
                # just average the diff of two blocks that occurred in the same second..
                slc.value = ((difficulty * 1000) + slc.value) / 2
                db.session.commit()
Example #6
File: tasks.py Project: helsaba/simplemona
def difficulty_avg(self):
    """
    Set up a blob with the average network difficulty for the last 500 blocks
    """
    try:
        diff_list = cache.cache._client.lrange('block_cache', 0, 500)
        total_diffs = sum([bits_to_difficulty(diff) for diff in diff_list])
        blob = Blob(key='diff', data={'diff': str(total_diffs / len(diff_list))})
        db.session.merge(blob)
        db.session.commit()
    except Exception as exc:
        logger.warn("Unknown failure in difficulty_avg", exc_info=True)
        raise self.retry(exc=exc)
Example #7
File: tasks.py Project: lae/simplemona
def new_block(self, blockheight, bits=None, reward=None):
    """
    Notification that a new block height has been reached in the network.
    Sets some things into the cache for display on the website, adds graphing
    for the network difficulty graph.
    """
    # prevent lots of duplicate rerunning...
    last_blockheight = cache.get('blockheight') or 0
    if blockheight == last_blockheight:
        logger.warn("Recieving duplicate new_block notif, ignoring...")
        return
    logger.info("Recieved notice of new block height {}".format(blockheight))

    difficulty = bits_to_difficulty(bits)
    cache.set('blockheight', blockheight, timeout=1200)
    cache.set('difficulty', difficulty, timeout=1200)
    cache.set('reward', reward, timeout=1200)

    # keep the last 500 blocks in the cache for getting average difficulty
    cache.cache._client.lpush('block_cache', bits)
    cache.cache._client.ltrim('block_cache', 0, 500)
    diff_list = cache.cache._client.lrange('block_cache', 0, 500)
    total_diffs = sum([bits_to_difficulty(diff) for diff in diff_list])
    cache.set('difficulty_avg', total_diffs / len(diff_list), timeout=120 * 60)

    # add the difficulty as a one minute share
    now = datetime.datetime.utcnow()
    try:
        m = OneMinuteType(typ='netdiff', value=difficulty * 1000, time=now)
        db.session.add(m)
        db.session.commit()
    except sqlalchemy.exc.IntegrityError:
        db.session.rollback()
        slc = OneMinuteType.query.with_lockmode('update').filter_by(
            time=now, typ='netdiff').one()
        # just average the diff of two blocks that occurred in the same second..
        slc.value = ((difficulty * 1000) + slc.value) / 2
        db.session.commit()
Example #8
File: tasks.py Project: fscook/simplevert
def new_block(self, blockheight, bits=None, reward=None):
    """
    Notification that a new block height has been reached in the network.
    Sets some things into the cache for display on the website.
    """
    logger.info("Recieved notice of new block height {}".format(blockheight))

    cache.set('blockheight', blockheight, timeout=1200)
    cache.set('difficulty', bits_to_difficulty(bits), timeout=1200)
    cache.set('reward', reward, timeout=1200)

    # keep the last 500 blocks in the cache for getting average difficulty
    cache.cache._client.lpush('block_cache', bits)
    cache.cache._client.ltrim('block_cache', 0, 500)
Example #9
File: tasks.py Project: helsaba/simplemona
def new_block(self, blockheight, bits=None, reward=None):
    """
    Notification that a new block height has been reached in the network.
    """
    logger.info("Recieved notice of new block height {}".format(blockheight))
    if not isinstance(blockheight, int):
        logger.error("Invalid block height submitted, must be integer")

    blob = Blob(key='block', data={'height': str(blockheight),
                                   'difficulty': str(bits_to_difficulty(bits)),
                                   'reward': str(reward)})
    db.session.merge(blob)
    db.session.commit()

    # keep the last 500 blocks in the cache for getting average difficulty
    cache.cache._client.lpush('block_cache', bits)
    cache.cache._client.ltrim('block_cache', 0, 500)
Example #10
def update_network():
    """
    Queries the configured RPC servers to update network stats information.
    """
    for currency in currencies.itervalues():
        if not currency.mineable:
            continue

        try:
            gbt = currency.coinserv.getblocktemplate({})
        except (urllib3.exceptions.HTTPError, CoinRPCException) as e:
            current_app.logger.error("Unable to communicate with {} RPC server: {}"
                                     .format(currency, e))
            continue

        key = "{}_data".format(currency.key)
        block_cache_key = "{}_block_cache".format(currency.key)

        current_data = cache.get(key)
        if current_data and current_data['height'] == gbt['height']:
            # Already have information for this block; skip to the next
            # currency so the block cache isn't re-pushed every poll
            current_app.logger.debug(
                "Not updating {} net info, height {} already recorded."
                .format(currency, current_data['height']))
            continue
        current_app.logger.info(
            "Updating {} net info for height {}.".format(currency, gbt['height']))

        # Six hours' worth of blocks; how many we'll keep in the cache
        keep_count = 21600 / currency.block_time

        difficulty = bits_to_difficulty(gbt['bits'])
        cache.cache._client.lpush(block_cache_key, difficulty)
        cache.cache._client.ltrim(block_cache_key, 0, keep_count)
        diff_list = cache.cache._client.lrange(block_cache_key, 0, -1)
        difficulty_avg = sum(map(float, diff_list)) / len(diff_list)

        cache.set(key,
                  dict(height=gbt['height'],
                       difficulty=difficulty,
                       reward=gbt['coinbasevalue'] * current_app.SATOSHI,
                       difficulty_avg=difficulty_avg,
                       difficulty_avg_stale=len(diff_list) < keep_count),
                  timeout=1200)
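As a quick sanity check on the retention math above, with an assumed 60-second block time the list keeps 360 entries, which is exactly six hours of blocks:

block_time = 60                   # seconds; hypothetical currency setting
keep_count = 21600 / block_time   # 21600 s == 6 h -> 360 blocks retained
assert keep_count == 360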
Example #11
def get_block_stats(timedelta=None):
    if timedelta is None:
        timedelta = datetime.timedelta(days=30)
    average_diff = g.average_difficulty
    blocks = all_blocks()
    total_shares = 0
    total_difficulty = 0
    total_orphans = 0
    total_blocks = 0

    time_ago = datetime.datetime.utcnow() - timedelta
    for shares_to_solve, bits, orphan in (db.engine.execution_options(
            stream_results=True).execute(
                select([Block.shares_to_solve, Block.bits,
                        Block.orphan]).where(Block.merged_type == None).where(
                            Block.found_at >= time_ago).order_by(
                                Block.height.desc()))):
        total_shares += shares_to_solve
        total_difficulty += bits_to_difficulty(bits)
        if orphan is True:
            total_orphans += 1

    for block in blocks:
        if block.found_at >= time_ago:
            total_blocks += 1

    if total_orphans > 0 and total_blocks > 0:
        orphan_perc = (float(total_orphans) / total_blocks) * 100
    else:
        orphan_perc = 0

    if total_shares > 0 and total_difficulty > 0:
        pool_luck = (total_difficulty *
                     (2**32)) / (total_shares *
                                 current_app.config['hashes_per_share'])
    else:
        pool_luck = 1

    current_reward = (cache.get('reward') or 1) / 100000000.0
    coins_per_day = ((current_reward / (average_diff * (2**32 / 86400))) *
                     1000000)
    effective_return = (coins_per_day * pool_luck) * (
        (100 - orphan_perc) / 100)
    return pool_luck, effective_return, orphan_perc
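The pool_luck ratio above compares the hashes statistically required to solve the observed blocks (difficulty * 2**32) against the hashes the pool actually submitted. A hedged numeric example, with every input invented:

hashes_per_share = 65536    # assumed value of config['hashes_per_share']
total_difficulty = 1094.0   # sum of per-block difficulties in the window
total_shares = 80000000     # shares submitted over the same window
pool_luck = (total_difficulty * (2 ** 32)) / (total_shares * hashes_per_share)
print(round(pool_luck, 3))  # ~0.896, i.e. slightly unlucky (< 1.0)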
Example #12
def get_block_stats(timedelta=None):
    if timedelta is None:
        timedelta = datetime.timedelta(days=30)
    average_diff = g.average_difficulty
    blocks = all_blocks()
    total_shares = 0
    total_difficulty = 0
    total_orphans = 0
    total_blocks = 0

    time_ago = datetime.datetime.utcnow() - timedelta
    for shares_to_solve, bits, orphan in (
        db.engine.execution_options(stream_results=True).
        execute(select([Block.shares_to_solve, Block.bits, Block.orphan]).
                where(Block.merged_type == None).
                where(Block.found_at >= time_ago).
                order_by(Block.height.desc()))):
        total_shares += shares_to_solve
        total_difficulty += bits_to_difficulty(bits)
        if orphan is True:
            total_orphans += 1

    for block in blocks:
        if block.found_at >= time_ago:
            total_blocks += 1

    if total_orphans > 0 and total_blocks > 0:
        orphan_perc = (float(total_orphans) / total_blocks) * 100
    else:
        orphan_perc = 0

    if total_shares > 0 and total_difficulty > 0:
        pool_luck = (total_difficulty * (2**32)) / (total_shares * current_app.config['hashes_per_share'])
    else:
        pool_luck = 1

    current_reward = (cache.get('reward') or 1) / 100000000.0
    coins_per_day = ((current_reward / (average_diff * (2**32 / 86400))) * 1000000)
    effective_return = (coins_per_day * pool_luck) * ((100 - orphan_perc) / 100)
    return pool_luck, effective_return, orphan_perc
Example #13
File: models.py Project: fscook/simplecoin
    def difficulty(self):
        return bits_to_difficulty(self.bits)
Example #14
    def generate_job(self,
                     push=False,
                     flush=False,
                     new_block=False,
                     network='main'):
        """ Creates a new job for miners to work on. Push will trigger an
        event that sends new work but doesn't force a restart. If flush is
        true a job restart will be triggered. """

        # aux monitors will often call this early when not needed at startup
        if not self._last_gbt:
            self.logger.warn("Cannot generate new job, missing last GBT info")
            return

        if self.auxmons:
            merged_work = {}
            auxdata = {}
            for auxmon in self.auxmons:
                # If this network hasn't pushed a job yet, skip it
                if auxmon.last_work['hash'] is None:
                    continue
                merged_work[auxmon.last_work['chainid']] = dict(
                    hash=auxmon.last_work['hash'],
                    target=auxmon.last_work['type'])

            tree, size = bitcoin_data.make_auxpow_tree(merged_work)
            mm_hashes = [
                merged_work.get(tree.get(i), dict(hash=0))['hash']
                for i in xrange(size)
            ]
            mm_data = '\xfa\xbemm'
            mm_data += bitcoin_data.aux_pow_coinbase_type.pack(
                dict(
                    merkle_root=bitcoin_data.merkle_hash(mm_hashes),
                    size=size,
                    nonce=0,
                ))

            for auxmon in self.auxmons:
                if auxmon.last_work['hash'] is None:
                    continue
                data = dict(target=auxmon.last_work['target'],
                            hash=auxmon.last_work['hash'],
                            height=auxmon.last_work['height'],
                            found_block=auxmon.found_block,
                            index=mm_hashes.index(auxmon.last_work['hash']),
                            type=auxmon.last_work['type'],
                            hashes=mm_hashes)
                auxdata[auxmon.config['currency']] = data
        else:
            auxdata = {}
            mm_data = None

        # here we recalculate the current merkle branch and partial
        # coinbases for passing to the mining clients
        coinbase = Transaction()
        coinbase.version = 2
        # create a coinbase input with encoded height and padding for the
        # extranonces so script length is accurate
        extranonce_length = (self.manager.config['extranonce_size'] +
                             self.manager.config['extranonce_serv_size'])
        coinbase.inputs.append(
            Input.coinbase(self._last_gbt['height'],
                           addtl_push=[mm_data] if mm_data else [],
                           extra_script_sig=b'\0' * extranonce_length))

        coinbase_value = self._last_gbt['coinbasevalue']

        # Payout Darkcoin masternodes
        mn_enforcement = self._last_gbt.get('enforce_masternode_payments',
                                            True)
        if (self.config['payout_drk_mn'] is True or mn_enforcement is True) \
                and self._last_gbt.get('payee', '') != '':
            # Grab the darkcoin payout amount, default to 20%
            payout = self._last_gbt.get('payee_amount', coinbase_value / 5)
            coinbase_value -= payout
            coinbase.outputs.append(
                Output.to_address(payout, self._last_gbt['payee']))
            self.logger.debug(
                "Created TX output for masternode at ({}:{}). Coinbase value "
                "reduced to {}".format(self._last_gbt['payee'], payout,
                                       coinbase_value))

        # simple output to the proper address and value
        coinbase.outputs.append(
            Output.to_address(coinbase_value, self.config['pool_address']))

        job_id = hexlify(struct.pack(str("I"), self._job_counter))

        bt_obj = BlockTemplate.from_gbt(
            self._last_gbt, coinbase, extranonce_length, [
                Transaction(unhexlify(t['data']), fees=t['fee'])
                for t in self._last_gbt['transactions']
            ])
        # add in our merged mining data
        if mm_data:
            hashes = [
                bitcoin_data.hash256(tx.raw) for tx in bt_obj.transactions
            ]
            bt_obj.merkle_link = bitcoin_data.calculate_merkle_link([None] +
                                                                    hashes, 0)
        bt_obj.merged_data = auxdata
        bt_obj.job_id = job_id
        bt_obj.diff1 = self.config['diff1']
        bt_obj.algo = self.config['algo']
        bt_obj.currency = self.config['currency']
        bt_obj.pow_block_hash = self.config['pow_block_hash']
        bt_obj.block_height = self._last_gbt['height']
        bt_obj.acc_shares = set()
        if flush:
            bt_obj.type = 0
        elif push:
            bt_obj.type = 1
        else:
            bt_obj.type = 2
        bt_obj.found_block = self.found_block

        # Push the fresh job to users after updating details
        self._job_counter += 1
        if flush:
            self.jobs.clear()
        self.jobs[job_id] = bt_obj
        self.latest_job = bt_obj

        self.new_job.job = bt_obj
        self.new_job.set()
        self.new_job.clear()
        event = ("{name}.jobmanager.new_job:1|c\n".format(
            name=self.manager.config['procname']))
        if push or flush:
            self.logger.info(
                "{}: New block template with {:,} trans. "
                "Diff {:,.4f}. Subsidy {:,.2f}. Height {:,}. Merged: {}".
                format("FLUSH" if flush else "PUSH",
                       len(self._last_gbt['transactions']),
                       bits_to_difficulty(self._last_gbt['bits']),
                       self._last_gbt['coinbasevalue'] / 100000000.0,
                       self._last_gbt['height'], ', '.join(auxdata.keys())))
            event += ("{name}.jobmanager.work_push:1|c\n".format(
                name=self.manager.config['procname']))

        # Stats and notifications now that it's pushed
        if flush:
            event += ("{name}.jobmanager.work_restart:1|c\n".format(
                name=self.manager.config['procname']))
            self.logger.info("New {} network block announced! Wiping previous"
                             " jobs and pushing".format(network))
        elif push:
            self.logger.info(
                "New {} network block announced, pushing new job!".format(
                    network))

        if new_block:
            hex_bits = hexlify(bt_obj.bits)
            self.current_net['difficulty'] = bits_to_difficulty(hex_bits)
            self.current_net['subsidy'] = bt_obj.total_value
            self.current_net['height'] = bt_obj.block_height - 1
            self.current_net['last_block'] = time.time()
            self.current_net['prev_hash'] = bt_obj.hashprev_be_hex
            self.current_net['transactions'] = len(bt_obj.transactions)

            event += ("{name}.{curr}.difficulty:{diff}|g\n"
                      "{name}.{curr}.subsidy:{subsidy}|g\n"
                      "{name}.{curr}.job_generate:{t}|g\n"
                      "{name}.{curr}.height:{height}|g".format(
                          name=self.manager.config['procname'],
                          curr=self.config['currency'],
                          diff=self.current_net['difficulty'],
                          subsidy=bt_obj.total_value,
                          height=bt_obj.block_height - 1,
                          t=(time.time() - self._last_gbt['update_time']) *
                          1000))
        self.manager.log_event(event)
Example #15
    def generate_job(self, push=False, flush=False, new_block=False):
        """ Creates a new job for miners to work on. Push will trigger an
        event that sends new work but doesn't force a restart. If flush is
        true a job restart will be triggered. """

        # aux monitors will often call this early when not needed at startup
        if self.last_gbt is None:
            return

        merged_work = self.net_state['merged_work']
        if self.net_state['merged_work']:
            tree, size = bitcoin_data.make_auxpow_tree(merged_work)
            mm_hashes = [merged_work.get(tree.get(i), dict(hash=0))['hash']
                         for i in xrange(size)]
            mm_data = '\xfa\xbemm'
            mm_data += bitcoin_data.aux_pow_coinbase_type.pack(dict(
                merkle_root=bitcoin_data.merkle_hash(mm_hashes),
                size=size,
                nonce=0,
            ))
            mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes)
                        for chain_id, aux_work in merged_work.iteritems()]
        else:
            mm_later = []
            mm_data = None

        with self.job_lock:
            # here we recalculate the current merkle branch and partial
            # coinbases for passing to the mining clients
            coinbase = Transaction()
            coinbase.version = 2
            # create a coinbase input with encoded height and padding for the
            # extranonces so script length is accurate
            extranonce_length = (self.config['extranonce_size'] +
                                 self.config['extranonce_serv_size'])
            coinbase.inputs.append(
                Input.coinbase(self.last_gbt['height'],
                               addtl_push=[mm_data] if mm_data else [],
                               extra_script_sig=b'\0' * extranonce_length))
            # simple output to the proper address and value
            coinbase.outputs.append(
                Output.to_address(self.last_gbt['coinbasevalue'] - self.last_gbt['fees'], self.config['pool_address']))
            job_id = hexlify(struct.pack(str("I"), self.net_state['job_counter']))
            logger.info("Generating new block template with {} trans. Diff {}. Subsidy {}. Fees {}."
                        .format(len(self.net_state['transactions']),
                                bits_to_difficulty(self.last_gbt['bits']),
                                self.last_gbt['coinbasevalue'],
                                self.last_gbt['fees']))
            bt_obj = BlockTemplate.from_gbt(self.last_gbt, coinbase, extranonce_length, [])
            bt_obj.mm_later = copy(mm_later)
            # hashes = [bitcoin_data.hash256(tx.raw) for tx in bt_obj.transactions]
            bt_obj.merkle_link = bitcoin_data.calculate_merkle_link([None], 0)
            bt_obj.job_id = job_id
            bt_obj.block_height = self.last_gbt['height']
            bt_obj.acc_shares = set()

        if push:
            if flush:
                logger.info("New work announced! Wiping previous jobs...")
                self.net_state['jobs'].clear()
                self.net_state['latest_job'] = None
            else:
                logger.info("New work announced!")

        self.net_state['job_counter'] += 1
        self.net_state['jobs'][job_id] = bt_obj
        self.net_state['latest_job'] = job_id
        if push:
            for idx, client in viewitems(self.stratum_clients):
                try:
                    if flush:
                        client.new_block_event.set()
                    else:
                        client.new_work_event.set()
                except AttributeError:
                    pass

        if new_block:
            # hex_bits must be computed outside the conditional below, or the
            # difficulty update raises NameError when send_new_block is off
            hex_bits = hexlify(bt_obj.bits)
            if self.config['send_new_block']:
                self.celery.send_task_pp('new_block',
                                         bt_obj.block_height,
                                         hex_bits,
                                         bt_obj.total_value)
            self.net_state['difficulty'] = bits_to_difficulty(hex_bits)
Example #16
    def generate_job(self, push=False, flush=False, new_block=False):
        """ Creates a new job for miners to work on. Push will trigger an
        event that sends new work but doesn't force a restart. If flush is
        true a job restart will be triggered. """

        # aux monitors will often call this early when not needed at startup
        if self.last_gbt is None:
            return

        merged_work = self.net_state['merged_work']
        if self.net_state['merged_work']:
            tree, size = bitcoin_data.make_auxpow_tree(merged_work)
            mm_hashes = [
                merged_work.get(tree.get(i), dict(hash=0))['hash']
                for i in xrange(size)
            ]
            mm_data = '\xfa\xbemm'
            mm_data += bitcoin_data.aux_pow_coinbase_type.pack(
                dict(
                    merkle_root=bitcoin_data.merkle_hash(mm_hashes),
                    size=size,
                    nonce=0,
                ))
            mm_later = [(aux_work, mm_hashes.index(aux_work['hash']),
                         mm_hashes)
                        for chain_id, aux_work in merged_work.iteritems()]
        else:
            mm_later = []
            mm_data = None

        # here we recalculate the current merkle branch and partial
        # coinbases for passing to the mining clients
        coinbase = Transaction()
        coinbase.version = 2
        # create a coinbase input with encoded height and padding for the
        # extranonces so script length is accurate
        extranonce_length = (self.config['extranonce_size'] +
                             self.config['extranonce_serv_size'])
        coinbase.inputs.append(
            Input.coinbase(self.last_gbt['height'],
                           addtl_push=[mm_data] if mm_data else [],
                           extra_script_sig=b'\0' * extranonce_length))
        # simple output to the proper address and value
        coinbase.outputs.append(
            Output.to_address(self.last_gbt['coinbasevalue'],
                              self.config['pool_address']))
        job_id = hexlify(struct.pack(str("I"), self.net_state['job_counter']))
        logger.info(
            "Generating new block template with {} trans. Diff {}. Subsidy {}."
            .format(len(self.last_gbt['transactions']),
                    bits_to_difficulty(self.last_gbt['bits']),
                    self.last_gbt['coinbasevalue']))
        bt_obj = BlockTemplate.from_gbt(
            self.last_gbt, coinbase, extranonce_length, [
                Transaction(unhexlify(t['data']), fees=t['fee'])
                for t in self.last_gbt['transactions']
            ])
        bt_obj.mm_later = copy(mm_later)
        hashes = [bitcoin_data.hash256(tx.raw) for tx in bt_obj.transactions]
        bt_obj.merkle_link = bitcoin_data.calculate_merkle_link([None] +
                                                                hashes, 0)
        bt_obj.job_id = job_id
        bt_obj.block_height = self.last_gbt['height']
        bt_obj.acc_shares = set()

        if push:
            if flush:
                logger.info("New work announced! Wiping previous jobs...")
                self.net_state['jobs'].clear()
                self.net_state['latest_job'] = None
            else:
                logger.info("New work announced!")

        self.net_state['job_counter'] += 1
        self.net_state['jobs'][job_id] = bt_obj
        self.net_state['latest_job'] = job_id
        if push:
            for idx, client in viewitems(self.stratum_clients):
                try:
                    if flush:
                        client.new_block_event.set()
                    else:
                        client.new_work_event.set()
                except AttributeError:
                    pass

        if new_block:
            hex_bits = hexlify(bt_obj.bits)
            self.net_state['work']['difficulty'] = bits_to_difficulty(hex_bits)
            if self.config['send_new_block']:
                self.celery.send_task_pp('new_block', bt_obj.block_height,
                                         hex_bits, bt_obj.total_value)
Example #17
def credit_block(redis_key, simulate=False):
    """
    Calculates credits for users from share records for the latest found block.
    """
    # Coerce simulate to a strict boolean rather than relying on truthiness
    if simulate is not True:
        simulate = False
    if simulate:
        current_app.logger.warn("Running in simulate mode, no commit will be performed")
        current_app.logger.setLevel(logging.DEBUG)

    data = redis_conn.hgetall(redis_key)
    current_app.logger.debug("Processing block with details {}".format(data))
    merged = bool(int(data.get('merged', False)))
    # If start_time isn't listed explicitly do our best to derive from
    # statistical share records
    if 'start_time' in data:
        time_started = datetime.datetime.utcfromtimestamp(float(data.get('start_time')))
    else:
        time_started = last_block_time(data['algo'], merged=merged)

    block = Block(
        user=data.get('address'),
        height=data['height'],
        total_value=(Decimal(data['total_subsidy']) / 100000000),
        transaction_fees=(Decimal(data['fees']) / 100000000),
        difficulty=bits_to_difficulty(data['hex_bits']),
        hash=data['hash'],
        time_started=time_started,
        currency=data['currency'],
        worker=data.get('worker'),
        found_at=datetime.datetime.utcfromtimestamp(float(data['solve_time'])),
        algo=data['algo'],
        merged=merged)

    db.session.add(block)
    db.session.flush()

    # Parse out chain results from the block key
    chain_data = {}
    chain_default = {'shares': Decimal('0')}

    for key, value in data.iteritems():
        if key.startswith("chain_"):
            _, chain_id, key = key.split("_", 2)
            chain_id = int(chain_id)
            chain = chain_data.setdefault(chain_id, chain_default.copy())
            chain['id'] = chain_id
            if key == "shares":
                value = Decimal(value)
            elif key == "solve_index":
                value = int(value)
            # XXX: Could do extra check for setting duplicate data (overwrite) here
            chain[key] = value

    # Objectize the data. Use object to store all information moving forward
    chains = []
    for id, chain in chain_data.iteritems():
        if chain['shares'] == 0:
            continue
        cpo = ChainPayout(chainid=id,
                          block=block,
                          solve_slice=chain['solve_index'],
                          chain_shares=chain['shares'])
        cpo.user_shares = {}
        cpo.credits = {}
        db.session.add(cpo)
        chains.append(cpo)

    # XXX: Would be good to check composite primary key integrity here, but
    # will fail on other constraints
    #db.session.flush()

    # XXX: Change to a tabulate print
    current_app.logger.info("Parsed out chain data of {}".format(chain_data))

    # Distribute total block value among chains
    share_distrib = {chain.chainid: chain.chain_shares for chain in chains}
    distrib = distributor(block.total_value, share_distrib)
    for chain in chains:
        chain.amount = distrib[chain.chainid]

    # Fetch the share distribution for this payout chain
    users = set()
    for chain in chains:
        # Actually fetch the shares from redis!
        chain.user_shares = chain.config_obj.calc_shares(chain)
        # If we have nothing, default to paying out the block finder everything
        if not chain.user_shares:
            chain.user_shares[block.user] = 1
        # Add the users to the set, no dups
        users.update(chain.user_shares.keys())

        # Record how many shares were used to payout
        chain.payout_shares = sum(chain.user_shares.itervalues())

    # Grab all possible user based settings objects for all chains
    custom_settings = {}
    if users:
        custom_settings = {s.user: s for s in UserSettings.query.filter(
            UserSettings.user.in_(users)).all()}

    # XXX: Double check that currency code lookups will work relying on
    # currency obj hashability

    # The currencies that are valid to pay out in from this block. Basically,
    # this block currency + all exchangeable currencies if this block's
    # currency is also exchangeable
    valid_currencies = [block.currency_obj]
    if block.currency_obj.exchangeable is True:
        valid_currencies.extend(currencies.exchangeable_currencies)

    # Get the pools payout information for this block
    global_curr = global_config.pool_payout_currency
    pool_payout = dict(address=block.currency_obj.pool_payout_addr,
                       currency=block.currency_obj,
                       user=global_curr.pool_payout_addr)
    # If this currency has no payout address, switch to global default
    if pool_payout['address'] is None:
        pool_payout['address'] = global_curr.pool_payout_addr
        pool_payout['currency'] = global_curr
        assert block.currency_obj.exchangeable, "Block is un-exchangeable"

    # Double check valid. Paranoid
    address_version(pool_payout['address'])

    def filter_valid(user, address, currency):
        try:
            if isinstance(currency, basestring):
                currency = currencies[currency]
        except KeyError:
            current_app.logger.debug(
                "Converted user {}, addr {}, currency {} => pool addr"
                " because invalid currency"
                .format(user, address, currency))
            return pool_payout
        if currency not in valid_currencies:
            current_app.logger.debug(
                "Converted user {}, addr {}, currency {} => pool addr"
                " because invalid currency"
                .format(user, address, currency))
            return pool_payout

        return dict(address=address, currency=currency, user=user)

    # Parse usernames and user settings to build appropriate credit objects
    for chain in chains:
        for username in chain.user_shares.keys():
            try:
                version = address_version(username)
            except Exception:
                # Give these shares to the pool, invalid address version
                chain.make_credit_obj(shares=chain.user_shares[username],
                                      **pool_payout)
                continue

            currency = currencies.version_map.get(version)
            # Check to see if we need to treat them real special :p
            settings = custom_settings.get(username)
            shares = chain.user_shares.pop(username)
            if settings:
                converted = settings.apply(
                    shares, currency, block.currency, valid_currencies)
                # Check to make sure no funny business
                assert sum(c[2] for c in converted) == shares, "Settings apply function returned bad stuff"
                # Create the separate payout objects from settings return info
                for address, currency, shares in converted:
                    chain.make_credit_obj(
                        shares=shares,
                        **filter_valid(username, address, currency))
            else:
                # (try to) Payout directly to mining address
                chain.make_credit_obj(
                    shares=shares,
                    **filter_valid(username, username, currency))

    # Calculate the portion that each user receives
    for chain in chains:
        chain.distribute()

    # Another double check
    paid = 0
    fees_collected = 0
    donations_collected = 0
    for chain in chains:
        chain_fee_perc = chain.config_obj.fee_perc
        for key, credit in chain.credits.items():
            # don't try to pay out users with zero payout
            if credit.amount == 0:
                db.session.expunge(credit)
                del chain.credits[key]
                continue

            # Skip fees/donations for the pool address
            if credit.user == pool_payout['user']:
                continue

            # To do a final check of payout amount
            paid += credit.amount

            # Fee/donation/bonus lookup
            fee_perc = chain_fee_perc
            donate_perc = Decimal('0')
            settings = custom_settings.get(credit.user)
            if settings:
                donate_perc = settings.pdonation_perc

            # Application
            assert isinstance(fee_perc, Decimal)
            assert isinstance(donate_perc, Decimal)
            fee_amount = credit.amount * fee_perc
            donate_amount = credit.amount * donate_perc
            credit.amount -= fee_amount
            credit.amount -= donate_amount

            # Recording
            credit.fee_perc = int(fee_perc * 100)
            credit.pd_perc = int(donate_perc * 100)

            # Bookkeeping
            donations_collected += donate_amount
            fees_collected += fee_amount

    if fees_collected > 0:
        p = Credit.make_credit(
            user=pool_payout['user'],
            block=block,
            currency=pool_payout['currency'].key,
            source=1,
            address=pool_payout['address'],
            amount=+fees_collected)
        db.session.add(p)

    if donations_collected > 0:
        p = Credit.make_credit(
            user=pool_payout['user'],
            block=block,
            currency=pool_payout['currency'].key,
            source=2,
            address=pool_payout['address'],
            amount=+donations_collected)
        db.session.add(p)

    current_app.logger.info("Collected {} in donation".format(donations_collected))
    current_app.logger.info("Collected {} from fees".format(fees_collected))
    current_app.logger.info("Net swing from block {}"
                            .format(fees_collected + donations_collected))

    pool_key = (pool_payout['user'], pool_payout['address'], pool_payout['currency'])
    for chain in chains:
        if pool_key not in chain.credits:
            continue
        current_app.logger.info(
            "Collected {} from invalid mining addresses on chain {}"
            .format(chain.credits[pool_key].amount, chain.chainid))

    if not simulate:
        db.session.commit()
        redis_conn.delete(redis_key)
    else:
        db.session.rollback()
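The distributor() call above is not shown on this page; presumably it splits the block value across chains proportionally to their share counts. A hedged sketch of such a proportional split (ignoring the remainder handling a real implementation would need):

from decimal import Decimal

def distributor_sketch(total, share_distrib):
    # Hypothetical stand-in for distributor(): divide `total` among the
    # keys of share_distrib in proportion to their Decimal share counts
    total_shares = sum(share_distrib.itervalues())
    return {key: (total * shares) / total_shares
            for key, shares in share_distrib.iteritems()}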
Example #18
File: netmon.py Project: fscook/powerpool
def monitor_network(stratum_clients, net_state, config, server_state, celery):
    def push_new_block():
        """ Called when a new block was discovered in the longest blockchain.
        This will dump current jobs, create a new job, and then push the
        new job to all mining clients """
        for idx, client in viewitems(stratum_clients):
            try:
                logger.debug("Signaling new block for client {}".format(idx))
                client.new_block_event.set()
            except AttributeError:
                pass

    def update_pool(conn):
        try:
            # request local memory pool and load it in
            bt = conn.getblocktemplate({'capabilities': [
                'coinbasevalue',
                'coinbase/append',
                'coinbase',
                'generation',
                'time',
                'transactions/remove',
                'prevblock',
            ]})
        except Exception:
            logger.warn("Failed to fetch new job, RPC must be down..")
            down_connection(conn, net_state)
            return False
        dirty = 0   # track a change in the transaction pool
        for trans in bt['transactions']:
            if trans['hash'] not in net_state['transactions']:
                dirty += 1
                new_trans = Transaction(unhexlify(trans['data']),
                                        fees=trans['fee'])
                assert trans['hash'] == new_trans.lehexhash
                net_state['transactions'][trans['hash']] = new_trans

        if dirty or len(net_state['jobs']) == 0:
            # here we recalculate the current merkle branch and partial
            # coinbases for passing to the mining clients
            coinbase = Transaction()
            coinbase.version = 2
            # create a coinbase input with encoded height and padding for the
            # extranonces so script length is accurate
            extranonce_length = (config['extranonce_size'] +
                                 config['extranonce_serv_size'])
            coinbase.inputs.append(
                Input.coinbase(bt['height'], b'\0' * extranonce_length))
            # simple output to the proper address and value
            fees = 0
            for t in net_state['transactions'].itervalues():
                fees += t.fees
            coinbase.outputs.append(
                Output.to_address(bt['coinbasevalue'] - fees, config['pool_address']))
            job_id = hexlify(pack(str("I"), net_state['job_counter']))
            logger.info("Generating new block template with {} trans"
                        .format(len(net_state['transactions'])))
            bt_obj = BlockTemplate.from_gbt(bt, coinbase, extranonce_length, [])
            bt_obj.job_id = job_id
            bt_obj.block_height = bt['height']
            bt_obj.acc_shares = set()
            net_state['job_counter'] += 1
            net_state['jobs'][job_id] = bt_obj
            net_state['latest_job'] = job_id
            logger.debug("Adding {} new transactions to transaction pool, "
                         "created job {}".format(dirty, job_id))

            return bt_obj

    def check_height(conn):
        # check the block height
        try:
            height = conn.getblockcount()
        except Exception:
            logger.warn("Unable to communicate with server that thinks it's live.")
            down_connection(conn, net_state)
            return False

        if net_state['current_height'] != height:
            net_state['current_height'] = height
            return True
        return False

    i = 0
    while True:
        try:
            conn = net_state['poll_connection']
            if conn is None:
                logger.warn("Couldn't connect to any RPC servers, sleeping for 1")
                sleep(1)
                continue

            # if there's a new block registered
            if check_height(conn):
                # dump the current transaction pool, refresh and push the
                # event
                logger.debug("New block announced! Wiping previous jobs...")
                net_state['transactions'].clear()
                net_state['jobs'].clear()
                net_state['latest_job'] = None
                bt_obj = update_pool(conn)
                if not bt_obj:
                    logger.error("No block template returned from update_pool "
                                 "after clearing jobs...")
                    continue
                push_new_block()
                hex_bits = hexlify(bt_obj.bits)
                celery.send_task_pp('new_block', bt_obj.block_height, hex_bits, bt_obj.total_value)
                net_state['difficulty'] = bits_to_difficulty(hex_bits)
            else:
                # check for new transactions when count interval has passed
                if i >= config['job_generate_int']:
                    i = 0
                    update_pool(conn)
                i += 1
        except Exception:
            logger.error("Unhandled exception!", exc_info=True)

        sleep(config['block_poll'])
Example #19
    def test_bits_to_diff(self):
        # assert the difficulty-1 bits value converts to exactly 1
        self.assertEquals(bits_to_difficulty("1d00ffff"), 1)
        self.assertEquals(int(bits_to_difficulty("1b3be743")), 1094)
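These assertions pin down the behavior every example on this page relies on. A minimal sketch of the conversion, assuming the standard Bitcoin compact-target ("bits") encoding; the project's actual helper may differ in signature or rounding:

def bits_to_difficulty_sketch(bits_hex):
    # Decode the compact representation: 1 exponent byte, 3 mantissa bytes
    bits = int(bits_hex, 16)
    exponent = bits >> 24
    mantissa = bits & 0xffffff
    target = mantissa * 2 ** (8 * (exponent - 3))
    # Difficulty is the ratio of the diff-1 target (bits 0x1d00ffff) to ours
    diff1_target = 0xffff * 2 ** (8 * (0x1d - 3))
    return float(diff1_target) / target

assert bits_to_difficulty_sketch("1d00ffff") == 1.0
assert int(bits_to_difficulty_sketch("1b3be743")) == 1094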
Example #20
    def generate_job(self, push=False, flush=False, new_block=False, network='main'):
        """ Creates a new job for miners to work on. Push will trigger an
        event that sends new work but doesn't force a restart. If flush is
        true a job restart will be triggered. """

        # aux monitors will often call this early when not needed at startup
        if not self._last_gbt:
            self.logger.warn("Cannot generate new job, missing last GBT info")
            return

        if self.auxmons:
            merged_work = {}
            auxdata = {}
            for auxmon in self.auxmons:
                if auxmon.last_work['hash'] is None:
                    continue
                merged_work[auxmon.last_work['chainid']] = dict(
                    hash=auxmon.last_work['hash'],
                    target=auxmon.last_work['type']
                )

            tree, size = bitcoin_data.make_auxpow_tree(merged_work)
            mm_hashes = [merged_work.get(tree.get(i), dict(hash=0))['hash']
                         for i in xrange(size)]
            mm_data = '\xfa\xbemm'
            mm_data += bitcoin_data.aux_pow_coinbase_type.pack(dict(
                merkle_root=bitcoin_data.merkle_hash(mm_hashes),
                size=size,
                nonce=0,
            ))

            for auxmon in self.auxmons:
                if auxmon.last_work['hash'] is None:
                    continue
                data = dict(target=auxmon.last_work['target'],
                            hash=auxmon.last_work['hash'],
                            height=auxmon.last_work['height'],
                            found_block=auxmon.found_block,
                            index=mm_hashes.index(auxmon.last_work['hash']),
                            type=auxmon.last_work['type'],
                            hashes=mm_hashes)
                auxdata[auxmon.config['currency']] = data
        else:
            auxdata = {}
            mm_data = None

        # here we recalculate the current merkle branch and partial
        # coinbases for passing to the mining clients
        coinbase = Transaction()
        coinbase.version = 2
        # create a coinbase input with encoded height and padding for the
        # extranonces so script length is accurate
        extranonce_length = (self.manager.config['extranonce_size'] +
                             self.manager.config['extranonce_serv_size'])
        coinbase.inputs.append(
            Input.coinbase(self._last_gbt['height'],
                           addtl_push=[mm_data] if mm_data else [],
                           extra_script_sig=b'\0' * extranonce_length))

        coinbase_value = self._last_gbt['coinbasevalue']

        # Payout Darkcoin masternodes
        mn_enforcement = self._last_gbt.get('enforce_masternode_payments', True)
        if (self.config['payout_drk_mn'] is True or mn_enforcement is True) \
                and self._last_gbt.get('payee', '') != '':
            # Grab the darkcoin payout amount, default to 20%
            payout = self._last_gbt.get('payee_amount', coinbase_value / 5)
            coinbase_value -= payout
            coinbase.outputs.append(
                Output.to_address(payout, self._last_gbt['payee']))
            self.logger.debug(
                "Created TX output for masternode at ({}:{}). Coinbase value "
                "reduced to {}".format(self._last_gbt['payee'], payout,
                                       coinbase_value))

        # simple output to the proper address and value
        coinbase.outputs.append(
            Output.to_address(coinbase_value, self.config['pool_address']))

        job_id = hexlify(struct.pack(str("I"), self._job_counter))
        bt_obj = BlockTemplate.from_gbt(self._last_gbt,
                                        coinbase,
                                        extranonce_length,
                                        [Transaction(unhexlify(t['data']), fees=t['fee'])
                                         for t in self._last_gbt['transactions']])
        # add in our merged mining data
        if mm_data:
            hashes = [bitcoin_data.hash256(tx.raw) for tx in bt_obj.transactions]
            bt_obj.merkle_link = bitcoin_data.calculate_merkle_link([None] + hashes, 0)
        bt_obj.merged_data = auxdata
        bt_obj.job_id = job_id
        bt_obj.diff1 = self.config['diff1']
        bt_obj.algo = self.config['algo']
        bt_obj.currency = self.config['currency']
        bt_obj.pow_block_hash = self.config['pow_block_hash']
        bt_obj.block_height = self._last_gbt['height']
        bt_obj.acc_shares = set()
        if flush:
            bt_obj.type = 0
        elif push:
            bt_obj.type = 1
        else:
            bt_obj.type = 2
        bt_obj.found_block = self.found_block

        # Push the fresh job to users after updating details
        self._job_counter += 1
        if flush:
            self.jobs.clear()
        self.jobs[job_id] = bt_obj
        self.latest_job = bt_obj

        self.new_job.job = bt_obj
        self.new_job.set()
        self.new_job.clear()
        event = ("{name}.jobmanager.new_job:1|c\n"
                 .format(name=self.manager.config['procname']))
        if push or flush:
            self.logger.info(
                "{}: New block template with {:,} trans. "
                "Diff {:,.4f}. Subsidy {:,.2f}. Height {:,}. Merged: {}"
                .format("FLUSH" if flush else "PUSH",
                        len(self._last_gbt['transactions']),
                        bits_to_difficulty(self._last_gbt['bits']),
                        self._last_gbt['coinbasevalue'] / 100000000.0,
                        self._last_gbt['height'],
                        ', '.join(auxdata.keys())))
            event += ("{name}.jobmanager.work_push:1|c\n"
                      .format(name=self.manager.config['procname']))

        # Stats and notifications now that it's pushed
        if flush:
            event += ("{name}.jobmanager.work_restart:1|c\n"
                      .format(name=self.manager.config['procname']))
            self.logger.info("New {} network block announced! Wiping previous"
                             " jobs and pushing".format(network))
        elif push:
            self.logger.info("New {} network block announced, pushing new job!"
                             .format(network))

        if new_block:
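            # The announced network block sits one below the height of the
            # template we now mine on, hence the block_height - 1 below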
            hex_bits = hexlify(bt_obj.bits)
            self.current_net['difficulty'] = bits_to_difficulty(hex_bits)
            self.current_net['subsidy'] = bt_obj.total_value
            self.current_net['height'] = bt_obj.block_height - 1
            self.current_net['last_block'] = time.time()
            self.current_net['prev_hash'] = bt_obj.hashprev_be_hex
            self.current_net['transactions'] = len(bt_obj.transactions)

            event += (
                "{name}.{curr}.difficulty:{diff}|g\n"
                "{name}.{curr}.subsidy:{subsidy}|g\n"
                "{name}.{curr}.job_generate:{t}|g\n"
                "{name}.{curr}.height:{height}|g"
                .format(name=self.manager.config['procname'],
                        curr=self.config['currency'],
                        diff=self.current_net['difficulty'],
                        subsidy=bt_obj.total_value,
                        height=bt_obj.block_height - 1,
                        t=(time.time() - self._last_gbt['update_time']) * 1000))
        self.manager.log_event(event)
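
The event strings assembled above follow the plain statsd text protocol:
name:value|c for counters and name:value|g for gauges, newline-separated. A
minimal sketch of a compatible sink is below; the UDP transport, the class
name, and the host/port defaults are assumptions for illustration, not the
project's actual manager implementation.

import socket

class StatsSink(object):
    """Ships statsd-formatted metric lines over UDP (a hedged sketch)."""

    def __init__(self, host='127.0.0.1', port=8125):
        self._addr = (host, port)
        self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

    def log_event(self, event):
        # statsd servers accept several newline-separated metrics per datagram
        self._sock.sendto(event, self._addr)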
Example #22
def credit_block(redis_key, simulate=False):
    """
    Calculates credits for users from share records for the latest found block.
    """
    # Require an explicit True; any other value for simulate is coerced to False
    if simulate is not True:
        simulate = False
    if simulate:
        current_app.logger.warn(
            "Running in simulate mode, no DB commit will be performed")
        current_app.logger.setLevel(logging.DEBUG)

    data = redis_conn.hgetall(redis_key)
    current_app.logger.debug("Processing block with details {}".format(data))
    merged = bool(int(data.get('merged', False)))
    # If start_time isn't listed explicitly, do our best to derive it from
    # the statistical share records
    if 'start_time' in data:
        time_started = datetime.datetime.utcfromtimestamp(
            float(data.get('start_time')))
    else:
        time_started = last_block_time(data['algo'], merged=merged)

    if data['fees'] == "None":
        data['fees'] = 0

    block = Block(
        user=data.get('address'),
        height=data['height'],
        total_value=(Decimal(data['total_subsidy']) / 100000000),
        transaction_fees=(Decimal(data['fees']) / 100000000),
        difficulty=bits_to_difficulty(data['hex_bits']),
        hash=data['hash'],
        time_started=time_started,
        currency=data['currency'],
        worker=data.get('worker'),
        found_at=datetime.datetime.utcfromtimestamp(float(data['solve_time'])),
        algo=data['algo'],
        merged=merged)

    db.session.add(block)
    db.session.flush()

    # Parse out chain results from the block key
    chain_data = {}
    chain_default = {'shares': Decimal('0')}

    for key, value in data.iteritems():
        if key.startswith("chain_"):
            _, chain_id, key = key.split("_", 2)
            chain_id = int(chain_id)
            chain = chain_data.setdefault(chain_id, chain_default.copy())
            chain['id'] = chain_id
            if key == "shares":
                value = Decimal(value)
            elif key == "solve_index":
                value = int(value)
            # XXX: Could do an extra check for duplicate data (overwrite) here
            chain[key] = value

    # Convert the parsed chain data into ChainPayout objects, which carry
    # all of the payout information from here on
    chains = []
    for id, chain in chain_data.iteritems():
        if chain['shares'] == 0:
            continue
        cpo = ChainPayout(chainid=id,
                          block=block,
                          solve_slice=chain['solve_index'],
                          chain_shares=chain['shares'])
        cpo.user_shares = {}
        cpo.credits = {}
        db.session.add(cpo)
        chains.append(cpo)

    # XXX: Would be good to check composite primary key integrity here, but
    # it will fail on other constraints
    #db.session.flush()

    # XXX: Change to a tabulate print
    current_app.logger.info("Parsed out chain data of {}".format(chain_data))

    # Distribute total block value among chains
    share_distrib = {chain.chainid: chain.chain_shares for chain in chains}
    distrib = distributor(block.total_value, share_distrib)
    for chain in chains:
        chain.amount = distrib[chain.chainid]

    # Fetch the share distribution for this payout chain
    users = set()
    for chain in chains:
        # Actually fetch the shares from redis!
        chain.user_shares = chain.config_obj.calc_shares(chain)
        # If we have nothing, default to paying out the block finder everything
        if not chain.user_shares:
            chain.user_shares[block.user] = 1
        # Add the users to the set, no dups
        users.update(chain.user_shares.keys())

        # Record how many shares were used to payout
        chain.payout_shares = sum(chain.user_shares.itervalues())

    # Grab all possible user based settings objects for all chains
    custom_settings = {}
    if users:
        custom_settings = {s.user: s for s in UserSettings.query.filter(
            UserSettings.user.in_(users)).all()}

    # XXX: Double check that currency code lookups will work relying on
    # currency obj hashability

    # The currencies that are valid to pay out in from this block. Basically,
    # this block currency + all buyable currencies if this block's currency is
    # sellable
    valid_currencies = [block.currency_obj]
    if block.currency_obj.sellable is True:
        valid_currencies.extend(currencies.buyable_currencies)

    pool_payout = block.currency_obj.pool_payout

    def filter_valid(user, address, currency):
        try:
            if isinstance(currency, basestring):
                currency = currencies[currency]
        except KeyError:
            current_app.logger.debug(
                "Converted user {}, addr {}, currency {} => pool addr"
                " because invalid currency"
                .format(user, address, currency))
            return pool_payout
        if currency not in valid_currencies:
            current_app.logger.debug(
                "Converted user {}, addr {}, currency {} => pool addr"
                " because invalid currency"
                .format(user, address, currency))
            return pool_payout

        return dict(address=address, currency=currency, user=user)

    # Parse usernames and user settings to build appropriate credit objects
    for chain in chains:
        for username in chain.user_shares.keys():
            try:
                version = address_version(username)
            except Exception:
                # Give these shares to the pool, invalid address version
                chain.make_credit_obj(shares=chain.user_shares[username],
                                      **pool_payout)
                continue

            currency = currencies.version_map.get(version)
            # Check whether this user has custom payout settings to apply
            settings = custom_settings.get(username)
            shares = chain.user_shares.pop(username)
            if settings:
                converted = settings.apply(
                    shares, currency, block.currency, valid_currencies)
                # Sanity check: the converted share totals must match the input
                assert sum(c[2] for c in converted) == shares, \
                    "settings.apply returned mismatched share totals"
                # Create the separate payout objects from settings return info
                for address, currency, shares in converted:
                    chain.make_credit_obj(
                        shares=shares,
                        **filter_valid(username, address, currency))
            else:
                # (try to) Payout directly to mining address
                chain.make_credit_obj(
                    shares=shares,
                    **filter_valid(username, username, currency))

    # Calculate the portion that each user receives
    for chain in chains:
        chain.distribute()

    # Another double check
    paid = 0
    fees_collected = 0
    donations_collected = 0
    for chain in chains:
        chain_fee_perc = chain.config_obj.fee_perc
        for key, credit in chain.credits.items():
            # Don't create credits for users with a zero payout amount
            if credit.amount == 0:
                db.session.expunge(credit)
                del chain.credits[key]
                continue

            # Skip fees/donations for the pool address
            if credit.user == pool_payout['user']:
                continue

            # To do a final check of payout amount
            paid += credit.amount

            # Fee/donation/bonus lookup
            fee_perc = chain_fee_perc
            donate_perc = Decimal('0')
            settings = custom_settings.get(credit.user)
            if settings:
                donate_perc = settings.pdonation_perc

            # Application
            assert isinstance(fee_perc, Decimal)
            assert isinstance(donate_perc, Decimal)
            fee_amount = credit.amount * fee_perc
            donate_amount = credit.amount * donate_perc
            credit.amount -= fee_amount
            credit.amount -= donate_amount

            # Recording
            credit.fee_perc = int(fee_perc * 100)
            credit.pd_perc = int(donate_perc * 100)

            # Bookkeeping
            donations_collected += donate_amount
            fees_collected += fee_amount

    if fees_collected > 0:
        p = Credit.make_credit(
            user=pool_payout['user'],
            block=block,
            currency=pool_payout['currency'].key,
            source=1,
            address=pool_payout['address'],
            amount=+fees_collected)
        db.session.add(p)

    if donations_collected > 0:
        p = Credit.make_credit(
            user=pool_payout['user'],
            block=block,
            currency=pool_payout['currency'].key,
            source=2,
            address=pool_payout['address'],
            amount=+donations_collected)
        db.session.add(p)

    current_app.logger.info("Collected {} {} in donation"
                            .format(donations_collected, block.currency))
    current_app.logger.info("Collected {} {} from fees"
                            .format(fees_collected, block.currency))
    current_app.logger.info(
        "Net swing from block {} {}"
        .format(fees_collected + donations_collected, block.currency))

    pool_key = (pool_payout['user'], pool_payout['address'],
                pool_payout['currency'])

    for chain in chains:
        if pool_key not in chain.credits:
            continue
        current_app.logger.info(
            "Collected {} from invalid mining addresses on chain {}"
            .format(chain.credits[pool_key].amount, chain.chainid))

    if not simulate:
        db.session.commit()
        redis_conn.delete(redis_key)
    else:
        db.session.rollback()
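
For reference, a hedged sketch of what the distributor helper used above might
look like: it splits a Decimal total among chains in proportion to their share
counts, quantizing to satoshi precision and handing the rounding remainder to
the largest shareholder so the amounts always sum exactly to the total. The
project's real helper may differ.

from decimal import Decimal

def distributor(total, share_dict):
    """Split total proportionally by the weights in share_dict (a sketch)."""
    total_shares = sum(share_dict.itervalues())
    satoshi = Decimal('0.00000001')
    amounts = dict((key, (total * shares / total_shares).quantize(satoshi))
                   for key, shares in share_dict.iteritems())
    # Quantizing leaves dust; hand it to the largest shareholder so the
    # amounts still sum exactly to the original total
    remainder = total - sum(amounts.itervalues())
    amounts[max(share_dict, key=share_dict.get)] += remainder
    return amounts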
Example #23
    def generate_job(self, push=False, flush=False, new_block=False):
        """ Creates a new job for miners to work on. Push will trigger an
        event that sends new work but doesn't force a restart. If flush is
        true a job restart will be triggered. """

        # aux monitors will often call this early when not needed at startup
        if not self._last_gbt:
            self.logger.warn("Cannot generate new job, missing last GBT info")
            return

        if self.merged_work:
            tree, size = bitcoin_data.make_auxpow_tree(self.merged_work)
            mm_hashes = [self.merged_work.get(tree.get(i), dict(hash=0))['hash']
                         for i in xrange(size)]
            mm_data = '\xfa\xbemm'
            mm_data += bitcoin_data.aux_pow_coinbase_type.pack(dict(
                merkle_root=bitcoin_data.merkle_hash(mm_hashes),
                size=size,
                nonce=0,
            ))
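            # Note (an assumption about the standard merged-mining scheme,
            # not project-specific): the '\xfa\xbemm' magic bytes mark where
            # aux chains scan the parent coinbase script for the packed aux
            # merkle root, proving the parent block commits to their hashes.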
            merged_data = {}
            for aux_work in self.merged_work.itervalues():
                data = dict(target=aux_work['target'],
                            hash=aux_work['hash'],
                            height=aux_work['height'],
                            index=mm_hashes.index(aux_work['hash']),
                            type=aux_work['type'],
                            hashes=mm_hashes)
                merged_data[aux_work['type']] = data
        else:
            merged_data = {}
            mm_data = None

        self.logger.info("Generating new block template with {} trans. "
                         "Diff {:,.4f}. Subsidy {:,.2f}. Height {:,}. "
                         "Merged chains: {}"
                         .format(len(self._last_gbt['transactions']),
                                 bits_to_difficulty(self._last_gbt['bits']),
                                 self._last_gbt['coinbasevalue'] / 100000000.0,
                                 self._last_gbt['height'],
                                 ', '.join(merged_data.keys())))

        # here we recalculate the current merkle branch and partial
        # coinbases for passing to the mining clients
        coinbase = Transaction()
        coinbase.version = 2
        # create a coinbase input with encoded height and padding for the
        # extranonces so script length is accurate
        extranonce_length = (self.config['extranonce_size'] +
                             self.config['extranonce_serv_size'])
        coinbase.inputs.append(
            Input.coinbase(self._last_gbt['height'],
                           addtl_push=[mm_data] if mm_data else [],
                           extra_script_sig=b'\0' * extranonce_length))

        # Darkcoin payee amount
        if self._last_gbt.get('payee', '') != '':
            payout = self._last_gbt['coinbasevalue'] / 5
            self._last_gbt['coinbasevalue'] -= payout
            coinbase.outputs.append(
                Output.to_address(payout, self._last_gbt['payee']))
            self.logger.info("Paying out masternode at addr {}. Payout {}. Blockval reduced to {}"
                             .format(self._last_gbt['payee'], payout, self._last_gbt['coinbasevalue']))

        # simple output to the proper address and value
        coinbase.outputs.append(
            Output.to_address(self._last_gbt['coinbasevalue'], self.config['pool_address']))

        job_id = hexlify(struct.pack(str("I"), self._job_counter))
        bt_obj = BlockTemplate.from_gbt(self._last_gbt,
                                        coinbase,
                                        extranonce_length,
                                        [Transaction(unhexlify(t['data']), fees=t['fee'])
                                         for t in self._last_gbt['transactions']])
        # add in our merged mining data
        if mm_data:
            hashes = [bitcoin_data.hash256(tx.raw) for tx in bt_obj.transactions]
            bt_obj.merkle_link = bitcoin_data.calculate_merkle_link([None] + hashes, 0)
        bt_obj.merged_data = merged_data
        bt_obj.job_id = job_id
        bt_obj.diff1 = self.config['diff1']
        bt_obj.algo = self.config['algo']
        bt_obj.pow_block_hash = self.config['pow_block_hash']
        bt_obj.block_height = self._last_gbt['height']
        bt_obj.acc_shares = set()

        if push:
            if flush:
                self.logger.info("New work announced! Wiping previous jobs...")
                self.jobs.clear()
                self.latest_job = None
            else:
                self.logger.info("New work announced!")

        self._job_counter += 1
        self.jobs[job_id] = bt_obj
        self.latest_job = job_id
        if push:
            t = time.time()
            bt_obj.stratum_string()
            for idx, client in viewitems(self.stratum_manager.clients):
                try:
                    if client.authenticated:
                        client._push(bt_obj, flush=flush)
                except AttributeError:
                    pass
            self.logger.info("New job enqueued for transmission to {} users in {}"
                             .format(len(self.stratum_manager.clients),
                                     time_format(time.time() - t)))
            if flush:
                self.server['work_restarts'].incr()
            self.server['work_pushes'].incr()

        self.server['new_jobs'].incr()

        if new_block:
            hex_bits = hexlify(bt_obj.bits)
            self.current_net['difficulty'] = bits_to_difficulty(hex_bits)
            self.current_net['subsidy'] = bt_obj.total_value
            self.current_net['height'] = bt_obj.block_height - 1
            self.current_net['prev_hash'] = bt_obj.hashprev_be_hex
            self.current_net['transactions'] = len(bt_obj.transactions)
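
A hedged sketch of how a polling loop might drive the push/flush/new_block
flags described in the docstring above; call_rpc and the poll_interval config
key are hypothetical names used only for illustration.

    def _poll_loop(self):
        while True:
            prev = self._last_gbt
            # call_rpc is a hypothetical helper standing in for however the
            # jobmanager actually fetches getblocktemplate
            self._last_gbt = self.call_rpc('getblocktemplate')
            if prev is None:
                # First template at startup: build a job without pushing
                self.generate_job()
            elif self._last_gbt['height'] != prev['height']:
                # A new network block makes old work stale: flush and restart
                self.generate_job(push=True, flush=True, new_block=True)
            else:
                # Same height, fresh transactions: push without a restart
                self.generate_job(push=True)
            time.sleep(self.config['poll_interval'])  # hypothetical config key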
Example #25
    def test_bits_to_diff(self):
        # The maximum target (bits 1d00ffff) corresponds to difficulty 1
        self.assertEquals(bits_to_difficulty("1d00ffff"), 1)
        self.assertEquals(int(bits_to_difficulty("1b3be743")), 1094)
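
For context, a hedged sketch of the conversion these tests exercise, assuming
the standard Bitcoin compact-bits encoding; the project's actual helper may
handle inputs differently.

def bits_to_difficulty(bits):
    """Convert a hex compact-bits string to a floating point difficulty."""
    exponent = int(bits[:2], 16)
    mantissa = int(bits[2:8], 16)
    target = mantissa * 2 ** (8 * (exponent - 3))
    # Difficulty is measured against the difficulty-1 target (bits 1d00ffff)
    diff1_target = 0xffff * 2 ** (8 * (0x1d - 3))
    return float(diff1_target) / target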
Example #26
    def difficulty(self):
        return bits_to_difficulty(self.bits)
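
Hypothetical usage, assuming the method above is exposed as a @property on a
model that stores its compact bits as a hex string column named bits:

latest = Block.query.order_by(Block.height.desc()).first()
print("Block {:,} difficulty: {:,.4f}".format(latest.height, latest.difficulty))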