Example #1
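These snippets are Python 2 (Twisted) code from p2pool-dash's work-fetching and share logic. A plausible module header, with package paths assumed from p2pool's layout (Example #3 additionally relies on module-level definitions such as hash_link_type, DONATION_SCRIPT, check_hash_link and prefix_to_hash_link that are not shown here):

import sys
import time

from twisted.internet import defer

import p2pool
from p2pool.dash import data as dash_data
from p2pool.util import deferral, jsonrpc, math, pack
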
# The yield/returnValue pattern below only works inside a Twisted
# inlineCallbacks coroutine.
@defer.inlineCallbacks
def getwork(dashd, net, use_getblocktemplate=False):
    def go():
        if use_getblocktemplate:
            return dashd.rpc_getblocktemplate(dict(mode='template'))
        else:
            return dashd.rpc_getmemorypool()

    try:
        start = time.time()
        work = yield go()
        end = time.time()
    except jsonrpc.Error_for_code(-32601):  # Method not found
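        # The daemon doesn't support the requested RPC; flip between
        # getblocktemplate and getmemorypool and retry once.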
        use_getblocktemplate = not use_getblocktemplate
        try:
            start = time.time()
            work = yield go()
            end = time.time()
        except jsonrpc.Error_for_code(-32601):  # Method not found
            print >> sys.stderr, 'Error: Dash version too old! Upgrade to v0.11.0.11 or newer!'
            raise deferral.RetrySilentlyException()
    packed_transactions = [
        (x['data'] if isinstance(x, dict) else x).decode('hex')
        for x in work['transactions']
    ]
    packed_votes = [(x['data'] if isinstance(x, dict) else x).decode('hex')
                    for x in work['votes']]
    if 'height' not in work:
        work['height'] = (yield dashd.rpc_getblock(
            work['previousblockhash']))['height'] + 1
    elif p2pool.DEBUG:
        assert work['height'] == (yield dashd.rpc_getblock(
            work['previousblockhash']))['height'] + 1
    defer.returnValue(
        dict(
            version=work['version'],
            previous_block=int(work['previousblockhash'], 16),
            transactions=map(dash_data.tx_type.unpack, packed_transactions),
            transaction_hashes=map(dash_data.hash256, packed_transactions),
            transaction_fees=[
                x.get('fee', None) if isinstance(x, dict) else None
                for x in work['transactions']
            ],
            subsidy=work['coinbasevalue'],
            time=work['time'] if 'time' in work else work['curtime'],
            bits=dash_data.FloatingIntegerType().unpack(
                work['bits'].decode('hex')[::-1])
            if isinstance(work['bits'], (str, unicode))
            else dash_data.FloatingInteger(work['bits']),
            coinbaseflags=work['coinbaseflags'].decode('hex')
            if 'coinbaseflags' in work
            else ''.join(x.decode('hex')
                         for x in work['coinbaseaux'].itervalues())
            if 'coinbaseaux' in work
            else '',
            height=work['height'],
            last_update=time.time(),
            use_getblocktemplate=use_getblocktemplate,
            latency=end - start,
            votes=map(dash_data.vote_type.unpack, packed_votes),
            payee=dash_data.address_to_pubkey_hash(work['payee'], net.PARENT)
            if work['payee'] != '' else None,
            masternode_payments=work['masternode_payments'],
            payee_amount=work['payee_amount']
            if work['payee_amount'] != '' else work['coinbasevalue'] / 5,
        ))
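A minimal sketch of driving the helper from another inlineCallbacks coroutine; the poll_work wrapper is an illustration, not part of the original module:

@defer.inlineCallbacks
def poll_work(dashd, net):
    work = yield getwork(dashd, net)
    print 'height %i, %i txs, latency %.3fs' % (
        work['height'], len(work['transactions']), work['latency'])
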
Example #2
@defer.inlineCallbacks
def getwork(dashd, net, use_getblocktemplate=True):
    def go():
        if use_getblocktemplate:
            return dashd.rpc_getblocktemplate(dict(mode='template'))
        else:
            return dashd.rpc_getmemorypool()

    try:
        start = time.time()
        work = yield go()
        end = time.time()
    except jsonrpc.Error_for_code(-32601):  # Method not found
        use_getblocktemplate = not use_getblocktemplate
        try:
            start = time.time()
            work = yield go()
            end = time.time()
        except jsonrpc.Error_for_code(-32601):  # Method not found
            print >> sys.stderr, 'Error: dash version too old! Upgrade to v0.11.2.17 or newer!'
            raise deferral.RetrySilentlyException()

    if work['transactions']:
        packed_transactions = [
            (x['data'] if isinstance(x, dict) else x).decode('hex')
            for x in work['transactions']
        ]
    else:
        packed_transactions = []
    if 'height' not in work:
        work['height'] = (yield dashd.rpc_getblock(
            work['previousblockhash']))['height'] + 1
    elif p2pool.DEBUG:
        assert work['height'] == (yield dashd.rpc_getblock(
            work['previousblockhash']))['height'] + 1

    # Dash Payments
    packed_payments = []
    payment_amount = 0

    payment_objects = []
    if 'masternode' in work:
        if isinstance(work['masternode'], list):
            payment_objects += work['masternode']
        else:
            payment_objects += [work['masternode']]
    if 'superblock' in work:
        payment_objects += work['superblock']

    for obj in payment_objects:
        g = {}
        if 'payee' in obj:
            g['payee'] = str(obj['payee'])
            g['amount'] = obj['amount']
            if g['amount'] > 0:
                payment_amount += g['amount']
                packed_payments.append(g)

    coinbase_payload = None
    if 'coinbase_payload' in work and len(work['coinbase_payload']) != 0:
        coinbase_payload = work['coinbase_payload'].decode('hex')

    defer.returnValue(
        dict(
            version=work['version'],
            previous_block=int(work['previousblockhash'], 16),
            transactions=map(dash_data.tx_type.unpack, packed_transactions),
            transaction_hashes=map(dash_data.hash256, packed_transactions),
            transaction_fees=[
                x.get('fee', None) if isinstance(x, dict) else None
                for x in work['transactions']
            ],
            subsidy=work['coinbasevalue'],
            time=work['time'] if 'time' in work else work['curtime'],
            bits=dash_data.FloatingIntegerType().unpack(
                work['bits'].decode('hex')[::-1])
            if isinstance(work['bits'], (str, unicode))
            else dash_data.FloatingInteger(work['bits']),
            coinbaseflags=work['coinbaseflags'].decode('hex')
            if 'coinbaseflags' in work
            else ''.join(x.decode('hex')
                         for x in work['coinbaseaux'].itervalues())
            if 'coinbaseaux' in work
            else '',
            height=work['height'],
            last_update=time.time(),
            use_getblocktemplate=use_getblocktemplate,
            latency=end - start,
            payment_amount=payment_amount,
            packed_payments=packed_payments,
            coinbase_payload=coinbase_payload,
        ))
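For illustration, a template carrying hypothetical masternode and superblock entries (made-up payees and amounts) would flow through the payment loop like this:

# hypothetical getblocktemplate fields
work = {'masternode': {'payee': 'Xmn...', 'amount': 125000000},
        'superblock': [{'payee': 'Xsb...', 'amount': 500000000}]}
# after the loop above:
#   packed_payments == [{'payee': 'Xmn...', 'amount': 125000000},
#                       {'payee': 'Xsb...', 'amount': 500000000}]
#   payment_amount == 625000000  # in duffs, 1e-8 DASH
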
Example #3
class Share(object):
    VERSION = 15
    VOTING_VERSION = 15
    SUCCESSOR = None

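    # Block header with merkle_root omitted; __init__ recomputes it from the
    # share's generation transaction and merkle_link.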
    small_block_header_type = pack.ComposedType([
        ('version', pack.VarIntType()),
        ('previous_block', pack.PossiblyNoneType(0, pack.IntType(256))),
        ('timestamp', pack.IntType(32)),
        ('bits', dash_data.FloatingIntegerType()),
        ('nonce', pack.IntType(32)),
    ])

    share_info_type = pack.ComposedType([
        ('share_data',
         pack.ComposedType([
             ('previous_share_hash',
              pack.PossiblyNoneType(0, pack.IntType(256))),
             ('coinbase', pack.VarStrType()),
             ('nonce', pack.IntType(32)),
             ('pubkey_hash', pack.IntType(160)),
             ('subsidy', pack.IntType(64)),
             ('donation', pack.IntType(16)),
             ('stale_info',
              pack.EnumType(
                  pack.IntType(8),
                  dict((k, {0: None, 253: 'orphan', 254: 'doa'}.get(k, 'unk%i' % (k,)))
                       for k in xrange(256)))),
             ('desired_version', pack.VarIntType()),
             ('payment_amount', pack.IntType(64)),
             ('packed_payments',
              pack.ListType(
                  pack.ComposedType([
                      ('payee', pack.PossiblyNoneType('', pack.VarStrType())),
                      ('amount', pack.PossiblyNoneType(0, pack.IntType(64))),
                  ]))),
         ])),
        ('new_transaction_hashes', pack.ListType(pack.IntType(256))),
        ('transaction_hash_refs',
         pack.ListType(pack.VarIntType(), 2)),  # pairs of (share_count, tx_count)
        ('far_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
        ('max_bits', dash_data.FloatingIntegerType()),
        ('bits', dash_data.FloatingIntegerType()),
        ('timestamp', pack.IntType(32)),
        ('absheight', pack.IntType(32)),
        ('abswork', pack.IntType(128)),
    ])

    share_type = pack.ComposedType([
        ('min_header', small_block_header_type),
        ('share_info', share_info_type),
        ('ref_merkle_link',
         pack.ComposedType([
             ('branch', pack.ListType(pack.IntType(256))),
             ('index', pack.IntType(0)),
         ])),
        ('last_txout_nonce', pack.IntType(64)),
        ('hash_link', hash_link_type),
        (
            'merkle_link',
            pack.ComposedType([
                ('branch', pack.ListType(pack.IntType(256))),
                ('index', pack.IntType(0)),  # it will always be 0
            ])),
    ])

    ref_type = pack.ComposedType([
        ('identifier', pack.FixedStrType(64 // 8)),
        ('share_info', share_info_type),
    ])

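    # Fixed byte prefix used by hash_link: the donation txout's script, the
    # final OP_RETURN output's zero value, and the first 3 bytes of that
    # output's script field (length 0x2a, OP_RETURN 0x6a, push-40 0x28); the
    # 40 bytes that follow it are the 32-byte ref hash + 8-byte txout nonce.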
    gentx_before_refhash = pack.VarStrType().pack(
        DONATION_SCRIPT) + pack.IntType(64).pack(0) + pack.VarStrType().pack(
            '\x6a\x28' + pack.IntType(256).pack(0) +
            pack.IntType(64).pack(0))[:3]

    @classmethod
    def generate_transaction(cls,
                             tracker,
                             share_data,
                             block_target,
                             desired_timestamp,
                             desired_target,
                             ref_merkle_link,
                             desired_other_transaction_hashes_and_fees,
                             net,
                             known_txs=None,
                             last_txout_nonce=0,
                             base_subsidy=None):
        previous_share = (tracker.items[share_data['previous_share_hash']]
                          if share_data['previous_share_hash'] is not None
                          else None)

        height, last = tracker.get_height_and_last(
            share_data['previous_share_hash'])
        assert height >= net.REAL_CHAIN_LENGTH or last is None
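        # Share-difficulty retarget: aim for one share per net.SHARE_PERIOD
        # seconds pool-wide, clamped to +/-10% of the previous share's
        # max_target and to the network's [MIN_TARGET, MAX_TARGET] bounds.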
        if height < net.TARGET_LOOKBEHIND:
            pre_target3 = net.MAX_TARGET
        else:
            attempts_per_second = get_pool_attempts_per_second(
                tracker,
                share_data['previous_share_hash'],
                net.TARGET_LOOKBEHIND,
                min_work=True,
                integer=True)
            pre_target = (2**256 // (net.SHARE_PERIOD * attempts_per_second) - 1
                          if attempts_per_second else 2**256 - 1)
            pre_target2 = math.clip(pre_target,
                                    (previous_share.max_target * 9 // 10,
                                     previous_share.max_target * 11 // 10))
            pre_target3 = math.clip(pre_target2,
                                    (net.MIN_TARGET, net.MAX_TARGET))
        max_bits = dash_data.FloatingInteger.from_target_upper_bound(
            pre_target3)
        bits = dash_data.FloatingInteger.from_target_upper_bound(
            math.clip(desired_target, (pre_target3 // 30, pre_target3)))

        new_transaction_hashes = []
        new_transaction_size = 0
        transaction_hash_refs = []
        other_transaction_hashes = []

        past_shares = list(
            tracker.get_chain(share_data['previous_share_hash'],
                              min(height, 100)))
        tx_hash_to_this = {}
        for i, share in enumerate(past_shares):
            for j, tx_hash in enumerate(share.new_transaction_hashes):
                if tx_hash not in tx_hash_to_this:
                    tx_hash_to_this[tx_hash] = [1 + i, j]  # [share_count, tx_count]
        for tx_hash, fee in desired_other_transaction_hashes_and_fees:
            if tx_hash in tx_hash_to_this:
                this = tx_hash_to_this[tx_hash]
            else:
                if known_txs is not None:
                    this_size = dash_data.tx_type.packed_size(
                        known_txs[tx_hash])
                    #if new_transaction_size + this_size > 50000: # only allow 50 kB of new txns/share
                    #    break
                    new_transaction_size += this_size
                new_transaction_hashes.append(tx_hash)
                this = [0, len(new_transaction_hashes) - 1]
            transaction_hash_refs.extend(this)
            other_transaction_hashes.append(tx_hash)

        included_transactions = set(other_transaction_hashes)
        removed_fees = [
            fee for tx_hash, fee in desired_other_transaction_hashes_and_fees
            if tx_hash not in included_transactions
        ]
        definite_fees = sum(
            0 if fee is None else fee
            for tx_hash, fee in desired_other_transaction_hashes_and_fees
            if tx_hash in included_transactions)
        if None not in removed_fees:
            share_data = dict(share_data,
                              subsidy=share_data['subsidy'] -
                              sum(removed_fees))
        else:
            assert base_subsidy is not None
            share_data = dict(share_data, subsidy=base_subsidy + definite_fees)

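        # PPLNS-style payout weights over up to REAL_CHAIN_LENGTH - 1 past
        # shares, capped at 65535*SPREAD times the block's expected attempts.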
        weights, total_weight, donation_weight = tracker.get_cumulative_weights(
            previous_share.share_data['previous_share_hash']
            if previous_share is not None else None,
            max(0,
                min(height, net.REAL_CHAIN_LENGTH) - 1),
            65535 * net.SPREAD *
            dash_data.target_to_average_attempts(block_target),
        )
        assert total_weight == sum(weights.itervalues()) + donation_weight, (
            total_weight, sum(weights.itervalues()) + donation_weight)

        worker_payout = share_data['subsidy']

        payments = share_data['packed_payments']
        payments_tx = []
        if payments is not None:
            for obj in payments:
                pm_script = dash_data.address_to_script2(
                    obj['payee'], net.PARENT)
                pm_payout = obj['amount']
                if pm_payout > 0:
                    payments_tx += [dict(value=pm_payout, script=pm_script)]
                    worker_payout -= pm_payout

        # 98% goes according to weights prior to this share
        amounts = dict((script, worker_payout * (49 * weight) // (50 * total_weight))
                       for script, weight in weights.iteritems())
        this_script = dash_data.pubkey_hash_to_script2(share_data['pubkey_hash'])
        # 2% goes to the block finder
        amounts[this_script] = amounts.get(this_script, 0) + worker_payout // 50
        # all that's left over is the donation weight and some extra satoshis due to rounding
        amounts[DONATION_SCRIPT] = (amounts.get(DONATION_SCRIPT, 0) +
                                    worker_payout - sum(amounts.itervalues()))

        if sum(amounts.itervalues()) != worker_payout or any(
                x < 0 for x in amounts.itervalues()):
            raise ValueError()

        worker_scripts = sorted(
            [k for k in amounts.iterkeys() if k != DONATION_SCRIPT])
        worker_tx = [
            dict(value=amounts[script], script=script)
            for script in worker_scripts if amounts[script]
        ]

        donation_tx = [
            dict(value=amounts[DONATION_SCRIPT], script=DONATION_SCRIPT)
        ]

        share_info = dict(
            share_data=share_data,
            far_share_hash=None if last is None and height < 99 else
            tracker.get_nth_parent_hash(share_data['previous_share_hash'], 99),
            max_bits=max_bits,
            bits=bits,
            timestamp=math.clip(
                desired_timestamp,
                (
                    (previous_share.timestamp + net.SHARE_PERIOD) -
                    (net.SHARE_PERIOD - 1),  # = previous_share.timestamp + 1
                    (previous_share.timestamp + net.SHARE_PERIOD) +
                    (net.SHARE_PERIOD - 1),
                )) if previous_share is not None else desired_timestamp,
            new_transaction_hashes=new_transaction_hashes,
            transaction_hash_refs=transaction_hash_refs,
            absheight=((previous_share.absheight
                        if previous_share is not None else 0) + 1) % 2**32,
            abswork=(
                (previous_share.abswork if previous_share is not None else 0) +
                dash_data.target_to_average_attempts(bits.target)) % 2**128,
        )

        gentx = dict(
            version=1,
            tx_ins=[
                dict(
                    previous_output=None,
                    sequence=None,
                    script=share_data['coinbase'],
                )
            ],
            tx_outs=worker_tx + payments_tx + donation_tx + [
                dict(
                    value=0,
                    script='\x6a\x28' +
                    cls.get_ref_hash(net, share_info, ref_merkle_link) +
                    pack.IntType(64).pack(last_txout_nonce),
                )
            ],
            lock_time=0,
        )

        def get_share(header, last_txout_nonce=last_txout_nonce):
            min_header = dict(header)
            del min_header['merkle_root']
            share = cls(
                net, None,
                dict(
                    min_header=min_header,
                    share_info=share_info,
                    ref_merkle_link=dict(branch=[], index=0),
                    last_txout_nonce=last_txout_nonce,
                    hash_link=prefix_to_hash_link(
                        dash_data.tx_type.pack(gentx)[:-32 - 8 - 4],
                        cls.gentx_before_refhash),
                    merkle_link=dash_data.calculate_merkle_link(
                        [None] + other_transaction_hashes, 0),
                ))
            assert share.header == header  # checks merkle_root
            return share

        return share_info, gentx, other_transaction_hashes, get_share
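    # Typical use (sketch; the solved block header dict comes from the miner
    # and is an assumption here):
    #     share_info, gentx, other_tx_hashes, get_share = \
    #         Share.generate_transaction(...)
    #     share = get_share(header)  # packs the share and asserts consistency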

    @classmethod
    def get_ref_hash(cls, net, share_info, ref_merkle_link):
        return pack.IntType(256).pack(
            dash_data.check_merkle_link(
                dash_data.hash256(
                    cls.ref_type.pack(
                        dict(
                            identifier=net.IDENTIFIER,
                            share_info=share_info,
                        ))), ref_merkle_link))

    __slots__ = 'net peer_addr contents min_header share_info hash_link merkle_link hash share_data max_target target timestamp previous_hash new_script desired_version gentx_hash header pow_hash header_hash new_transaction_hashes time_seen absheight abswork'.split(
        ' ')

    def __init__(self, net, peer_addr, contents):
        self.net = net
        self.peer_addr = peer_addr
        self.contents = contents

        self.min_header = contents['min_header']
        self.share_info = contents['share_info']
        self.hash_link = contents['hash_link']
        self.merkle_link = contents['merkle_link']

        if not (2 <= len(self.share_info['share_data']['coinbase']) <= 100):
            raise ValueError(
                '''bad coinbase size! %i bytes''' %
                (len(self.share_info['share_data']['coinbase']), ))

        if len(self.merkle_link['branch']) > 16:
            raise ValueError('merkle branch too long!')

        assert not self.hash_link['extra_data'], repr(
            self.hash_link['extra_data'])

        self.share_data = self.share_info['share_data']
        self.max_target = self.share_info['max_bits'].target
        self.target = self.share_info['bits'].target
        self.timestamp = self.share_info['timestamp']
        self.previous_hash = self.share_data['previous_share_hash']
        self.new_script = dash_data.pubkey_hash_to_script2(
            self.share_data['pubkey_hash'])
        self.desired_version = self.share_data['desired_version']
        self.absheight = self.share_info['absheight']
        self.abswork = self.share_info['abswork']

        n = set()
        for share_count, tx_count in self.iter_transaction_hash_refs():
            assert share_count < 110
            if share_count == 0:
                n.add(tx_count)
        assert n == set(xrange(len(self.share_info['new_transaction_hashes'])))

        self.gentx_hash = check_hash_link(
            self.hash_link,
            self.get_ref_hash(net, self.share_info,
                              contents['ref_merkle_link']) +
            pack.IntType(64).pack(self.contents['last_txout_nonce']) +
            pack.IntType(32).pack(0),
            self.gentx_before_refhash,
        )
        merkle_root = dash_data.check_merkle_link(self.gentx_hash,
                                                  self.merkle_link)
        self.header = dict(self.min_header, merkle_root=merkle_root)
        self.pow_hash = net.PARENT.POW_FUNC(
            dash_data.block_header_type.pack(self.header))
        self.hash = self.header_hash = net.PARENT.BLOCKHASH_FUNC(
            dash_data.block_header_type.pack(self.header))

        if self.target > net.MAX_TARGET:
            from p2pool import p2p
            raise p2p.PeerMisbehavingError('share target invalid')

        if self.pow_hash > self.target:
            from p2pool import p2p
            raise p2p.PeerMisbehavingError('share PoW invalid')

        self.new_transaction_hashes = self.share_info['new_transaction_hashes']

        # XXX eww
        self.time_seen = time.time()

    def __repr__(self):
        return 'Share' + repr((self.net, self.peer_addr, self.contents))

    def as_share(self):
        return dict(type=self.VERSION,
                    contents=self.share_type.pack(self.contents))

    def iter_transaction_hash_refs(self):
        return zip(self.share_info['transaction_hash_refs'][::2],
                   self.share_info['transaction_hash_refs'][1::2])
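        # e.g. refs [0, 1, 2, 0] pair up as (0, 1) and (2, 0): share_count 0
        # points into this share's own new_transaction_hashes, share_count 2
        # into the share two steps back along the chain.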

    def check(self, tracker):
        from p2pool import p2p
        if self.share_data['previous_share_hash'] is not None:
            previous_share = tracker.items[
                self.share_data['previous_share_hash']]
            if type(self) is type(previous_share):
                pass
            elif type(self) is type(previous_share).SUCCESSOR:
                if tracker.get_height(
                        previous_share.hash) < self.net.CHAIN_LENGTH:
                    from p2pool import p2p
                    raise p2p.PeerMisbehavingError(
                        'switch without enough history')

                # switch only valid if 85% of hashes in [self.net.CHAIN_LENGTH*9//10, self.net.CHAIN_LENGTH] for new version
                counts = get_desired_version_counts(
                    tracker,
                    tracker.get_nth_parent_hash(
                        previous_share.hash, self.net.CHAIN_LENGTH * 9 // 10),
                    self.net.CHAIN_LENGTH // 10)
                if counts.get(self.VERSION,
                              0) < sum(counts.itervalues()) * 85 // 100:
                    raise p2p.PeerMisbehavingError(
                        'switch without enough hash power upgraded')
            else:
                raise p2p.PeerMisbehavingError(
                    '''%s can't follow %s''' %
                    (type(self).__name__, type(previous_share).__name__))

        other_tx_hashes = [
            tracker.items[tracker.get_nth_parent_hash(
                self.hash,
                share_count)].share_info['new_transaction_hashes'][tx_count]
            for share_count, tx_count in self.iter_transaction_hash_refs()
        ]

        share_info, gentx, other_tx_hashes2, get_share = self.generate_transaction(
            tracker,
            self.share_info['share_data'],
            self.header['bits'].target,
            self.share_info['timestamp'],
            self.share_info['bits'].target,
            self.contents['ref_merkle_link'],
            [(h, None) for h in other_tx_hashes],
            self.net,
            last_txout_nonce=self.contents['last_txout_nonce'])
        assert other_tx_hashes2 == other_tx_hashes
        if share_info != self.share_info:
            raise ValueError('share_info invalid')
        if dash_data.hash256(dash_data.tx_type.pack(gentx)) != self.gentx_hash:
            raise ValueError('''gentx doesn't match hash_link''')

        if dash_data.calculate_merkle_link([None] + other_tx_hashes,
                                           0) != self.merkle_link:
            raise ValueError('merkle_link and other_tx_hashes do not match')

        return gentx  # only used by as_block

    def get_other_tx_hashes(self, tracker):
        parents_needed = (max(share_count for share_count, tx_count
                              in self.iter_transaction_hash_refs())
                          if self.share_info['transaction_hash_refs'] else 0)
        parents = tracker.get_height(self.hash) - 1
        if parents < parents_needed:
            return None
        last_shares = list(tracker.get_chain(self.hash, parents_needed + 1))
        return [
            last_shares[share_count].share_info['new_transaction_hashes'][tx_count]
            for share_count, tx_count in self.iter_transaction_hash_refs()
        ]

    def _get_other_txs(self, tracker, known_txs):
        other_tx_hashes = self.get_other_tx_hashes(tracker)
        if other_tx_hashes is None:
            return None  # not all parents present

        if not all(tx_hash in known_txs for tx_hash in other_tx_hashes):
            return None  # not all txs present

        return [known_txs[tx_hash] for tx_hash in other_tx_hashes]

    def should_punish_reason(self, previous_block, bits, tracker, known_txs):
        if ((self.header['previous_block'], self.header['bits']) !=
                (previous_block, bits) and self.header_hash != previous_block
                and self.peer_addr is not None):
            return True, 'Block-stale detected! height(%x) < height(%x) or %08x != %08x' % (
                self.header['previous_block'], previous_block,
                self.header['bits'].bits, bits.bits)

        if self.pow_hash <= self.header['bits'].target:
            return -1, 'block solution'

        other_txs = self._get_other_txs(tracker, known_txs)
        if other_txs is None:
            pass
        else:
            all_txs_size = sum(
                dash_data.tx_type.packed_size(tx) for tx in other_txs)
            if all_txs_size > 2000000:
                return True, 'txs over block size limit'
            '''
            new_txs_size = sum(dash_data.tx_type.packed_size(known_txs[tx_hash]) for tx_hash in self.share_info['new_transaction_hashes'])
            if new_txs_size > 50000:
                return True, 'new txs over limit'
            '''

        return False, None

    def as_block(self, tracker, known_txs):
        other_txs = self._get_other_txs(tracker, known_txs)
        if other_txs is None:
            return None  # not all txs present
        return dict(header=self.header, txs=[self.check(tracker)] + other_txs)
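
When a share's pow_hash also meets the block target, the packed block can be handed back to the daemon. A sketch in the same style; the try_submit wrapper and the dashd proxy are assumptions, not part of the class:

@defer.inlineCallbacks
def try_submit(share, tracker, known_txs, dashd):
    block = share.as_block(tracker, known_txs)
    if block is not None:
        # block_type.pack serializes header + txs; the RPC expects hex
        yield dashd.rpc_submitblock(
            dash_data.block_type.pack(block).encode('hex'))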