Example #1
    def test_tx_hash(self):
        # Packing this coinbase transaction ("Mined by BTC Guild") and
        # hashing it must reproduce its known transaction id.
        assert data.hash256(
            data.tx_type.pack(
                dict(
                    version=1,
                    tx_ins=[
                        dict(
                            previous_output=None,
                            sequence=None,
                            script=
                            '70736a0468860e1a0452389500522cfabe6d6d2b2f33cf8f6291b184f1b291d24d82229463fcec239afea0ee34b4bfc622f62401000000000000004d696e656420627920425443204775696c6420ac1eeeed88'
                            .decode('hex'),
                        )
                    ],
                    tx_outs=[
                        dict(
                            value=5003880250,
                            script=data.pubkey_hash_to_script2(
                                pack.IntType(160).unpack(
                                    'ca975b00a8c203b8692f5a18d92dc5c2d2ebc57b'.
                                    decode('hex'))),
                        )
                    ],
                    lock_time=0,
                ))
        ) == 0xb53802b2333e828d6532059f46ecf6b313a42d79f97925e457fbbfda45367e5c
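
For context: data.hash256 in this test is assumed to be Bitcoin's double SHA-256 with the digest read as a little-endian 256-bit integer, so the expected constant is simply the transaction id written as hex. A minimal sketch of that assumption (hash256_le is a hypothetical stand-in, not the p2pool helper itself):

import hashlib
import binascii


def hash256_le(data):
    # Double SHA-256 of data, read as a little-endian 256-bit integer;
    # assumed to mirror what data.hash256 returns in the test above.
    digest = hashlib.sha256(hashlib.sha256(data).digest()).digest()
    return int(binascii.hexlify(digest[::-1]), 16)
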
Example #2
    def __init__(self, net, peer_addr, contents):
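        """Validate a deserialized share and derive its header and hashes.

        peer_addr is the peer the share came from (None when the share was
        generated locally, as in generate_transaction's get_share below).
        Raises ValueError or p2p.PeerMisbehavingError if the coinbase size,
        merkle branch, target, or proof of work is invalid.
        """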
        self.net = net
        self.peer_addr = peer_addr
        self.contents = contents

        self.min_header = contents['min_header']
        self.share_info = contents['share_info']
        self.hash_link = contents['hash_link']
        self.merkle_link = contents['merkle_link']

        if not (2 <= len(self.share_info['share_data']['coinbase']) <= 100):
            raise ValueError(
                '''bad coinbase size! %i bytes''' %
                (len(self.share_info['share_data']['coinbase']), ))

        if len(self.merkle_link['branch']) > 16:
            raise ValueError('merkle branch too long!')

        assert not self.hash_link['extra_data'], repr(
            self.hash_link['extra_data'])

        self.share_data = self.share_info['share_data']
        self.max_target = self.share_info['max_bits'].target
        self.target = self.share_info['bits'].target
        self.timestamp = self.share_info['timestamp']
        self.previous_hash = self.share_data['previous_share_hash']
        self.new_script = axe_data.pubkey_hash_to_script2(
            self.share_data['pubkey_hash'])
        self.desired_version = self.share_data['desired_version']
        self.absheight = self.share_info['absheight']
        self.abswork = self.share_info['abswork']

        n = set()
        for share_count, tx_count in self.iter_transaction_hash_refs():
            assert share_count < 110
            if share_count == 0:
                n.add(tx_count)
        assert n == set(xrange(len(self.share_info['new_transaction_hashes'])))

        self.gentx_hash = check_hash_link(
            self.hash_link,
            self.get_ref_hash(net, self.share_info,
                              contents['ref_merkle_link']) +
            pack.IntType(64).pack(self.contents['last_txout_nonce']) +
            pack.IntType(32).pack(0),
            self.gentx_before_refhash,
        )
        merkle_root = axe_data.check_merkle_link(self.gentx_hash,
                                                 self.merkle_link)
        self.header = dict(self.min_header, merkle_root=merkle_root)
        self.pow_hash = net.PARENT.POW_FUNC(
            axe_data.block_header_type.pack(self.header))
        self.hash = self.header_hash = net.PARENT.BLOCKHASH_FUNC(
            axe_data.block_header_type.pack(self.header))

        if self.target > net.MAX_TARGET:
            from p2pool import p2p
            raise p2p.PeerMisbehavingError('share target invalid')

        if self.pow_hash > self.target:
            from p2pool import p2p
            raise p2p.PeerMisbehavingError('share PoW invalid')

        self.new_transaction_hashes = self.share_info['new_transaction_hashes']

        # XXX eww
        self.time_seen = time.time()
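
The gentx hash recomputed above is folded up merkle_link to recover the block's merkle root. A rough sketch of what axe_data.check_merkle_link is assumed to do; the helpers below are hypothetical, and they assume hashes are handled as little-endian 256-bit integers:

import hashlib
import binascii


def hash256_le(data):
    # Same double-SHA-256 helper as in the note under Example #1.
    digest = hashlib.sha256(hashlib.sha256(data).digest()).digest()
    return int(binascii.hexlify(digest[::-1]), 16)


def int_to_le32(h):
    # 256-bit integer to 32 bytes, little-endian.
    return binascii.unhexlify('%064x' % h)[::-1]


def check_merkle_link_sketch(tip_hash, link):
    # Fold the leaf hash up the branch.  At each level, bit i of
    # link['index'] says whether the running hash is the right half
    # (bit set) or the left half (bit clear) of the pair being hashed.
    h = tip_hash
    for i, sibling in enumerate(link['branch']):
        if (link['index'] >> i) & 1:
            h = hash256_le(int_to_le32(sibling) + int_to_le32(h))
        else:
            h = hash256_le(int_to_le32(h) + int_to_le32(sibling))
    return h
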
Example #3
    def generate_transaction(cls,
                             tracker,
                             share_data,
                             block_target,
                             desired_timestamp,
                             desired_target,
                             ref_merkle_link,
                             desired_other_transaction_hashes_and_fees,
                             net,
                             known_txs=None,
                             last_txout_nonce=0,
                             base_subsidy=None):
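        """Build share_info and the generation transaction for a new share.

        Extends the chain at share_data['previous_share_hash']: retargets
        the share difficulty, picks which transactions to reference, and
        splits the payout between workers, packed payments, and the
        donation script.  Returns (share_info, gentx,
        other_transaction_hashes, get_share), where get_share(header) wraps
        a solved header into a share object.
        """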
        previous_share = tracker.items[
            share_data['previous_share_hash']] if share_data[
                'previous_share_hash'] is not None else None

        height, last = tracker.get_height_and_last(
            share_data['previous_share_hash'])
        assert height >= net.REAL_CHAIN_LENGTH or last is None
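        # Retarget: max_bits follows the pool's recent attempt rate, clamped
        # to +/-10% of the previous share's max target and to the network's
        # MIN_TARGET/MAX_TARGET; bits then clips desired_target into
        # [pre_target3 // 30, pre_target3].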
        if height < net.TARGET_LOOKBEHIND:
            pre_target3 = net.MAX_TARGET
        else:
            attempts_per_second = get_pool_attempts_per_second(
                tracker,
                share_data['previous_share_hash'],
                net.TARGET_LOOKBEHIND,
                min_work=True,
                integer=True)
            pre_target = 2**256 // (
                net.SHARE_PERIOD *
                attempts_per_second) - 1 if attempts_per_second else 2**256 - 1
            pre_target2 = math.clip(pre_target,
                                    (previous_share.max_target * 9 // 10,
                                     previous_share.max_target * 11 // 10))
            pre_target3 = math.clip(pre_target2,
                                    (net.MIN_TARGET, net.MAX_TARGET))
        max_bits = axe_data.FloatingInteger.from_target_upper_bound(
            pre_target3)
        bits = axe_data.FloatingInteger.from_target_upper_bound(
            math.clip(desired_target, (pre_target3 // 30, pre_target3)))

        new_transaction_hashes = []
        new_transaction_size = 0
        transaction_hash_refs = []
        other_transaction_hashes = []

        past_shares = list(
            tracker.get_chain(share_data['previous_share_hash'],
                              min(height, 100)))
        tx_hash_to_this = {}
        for i, share in enumerate(past_shares):
            for j, tx_hash in enumerate(share.new_transaction_hashes):
                if tx_hash not in tx_hash_to_this:
                    tx_hash_to_this[tx_hash] = [1 + i,
                                                j]  # share_count, tx_count
        for tx_hash, fee in desired_other_transaction_hashes_and_fees:
            if tx_hash in tx_hash_to_this:
                this = tx_hash_to_this[tx_hash]
            else:
                if known_txs is not None:
                    this_size = axe_data.tx_type.packed_size(
                        known_txs[tx_hash])
                    #if new_transaction_size + this_size > 50000: # only allow 50 kB of new txns/share
                    #    break
                    new_transaction_size += this_size
                new_transaction_hashes.append(tx_hash)
                this = [0, len(new_transaction_hashes) - 1]
            transaction_hash_refs.extend(this)
            other_transaction_hashes.append(tx_hash)

        included_transactions = set(other_transaction_hashes)
        removed_fees = [
            fee for tx_hash, fee in desired_other_transaction_hashes_and_fees
            if tx_hash not in included_transactions
        ]
        definite_fees = sum(
            0 if fee is None else fee
            for tx_hash, fee in desired_other_transaction_hashes_and_fees
            if tx_hash in included_transactions)
        if None not in removed_fees:
            share_data = dict(share_data,
                              subsidy=share_data['subsidy'] -
                              sum(removed_fees))
        else:
            assert base_subsidy is not None
            share_data = dict(share_data, subsidy=base_subsidy + definite_fees)

        weights, total_weight, donation_weight = tracker.get_cumulative_weights(
            previous_share.share_data['previous_share_hash']
            if previous_share is not None else None,
            max(0,
                min(height, net.REAL_CHAIN_LENGTH) - 1),
            65535 * net.SPREAD *
            axe_data.target_to_average_attempts(block_target),
        )
        assert total_weight == sum(weights.itervalues()) + donation_weight, (
            total_weight, sum(weights.itervalues()) + donation_weight)

        worker_payout = share_data['subsidy']

        payments = share_data['packed_payments']
        payments_tx = []
        if payments is not None:
            for obj in payments:
                pm_script = axe_data.address_to_script2(
                    obj['payee'], net.PARENT)
                pm_payout = obj['amount']
                if pm_payout > 0:
                    payments_tx += [dict(value=pm_payout, script=pm_script)]
                    worker_payout -= pm_payout

        amounts = dict(
            (script, worker_payout * (49 * weight) // (50 * total_weight))
            for script, weight in weights.iteritems(
            ))  # 98% goes according to weights prior to this share
        this_script = axe_data.pubkey_hash_to_script2(
            share_data['pubkey_hash'])
        amounts[this_script] = amounts.get(
            this_script, 0) + worker_payout // 50  # 2% goes to block finder
        amounts[DONATION_SCRIPT] = amounts.get(
            DONATION_SCRIPT, 0
        ) + worker_payout - sum(
            amounts.itervalues()
        )  # all that's left over is the donation weight and some extra satoshis due to rounding

        if sum(amounts.itervalues()) != worker_payout or any(
                x < 0 for x in amounts.itervalues()):
            raise ValueError()

        worker_scripts = sorted(
            [k for k in amounts.iterkeys() if k != DONATION_SCRIPT])
        worker_tx = [
            dict(value=amounts[script], script=script)
            for script in worker_scripts if amounts[script]
        ]

        donation_tx = [
            dict(value=amounts[DONATION_SCRIPT], script=DONATION_SCRIPT)
        ]

        share_info = dict(
            share_data=share_data,
            far_share_hash=None if last is None and height < 99 else
            tracker.get_nth_parent_hash(share_data['previous_share_hash'], 99),
            max_bits=max_bits,
            bits=bits,
            timestamp=math.clip(
                desired_timestamp,
                (
                    (previous_share.timestamp + net.SHARE_PERIOD) -
                    (net.SHARE_PERIOD - 1),  # = previous_share.timestamp + 1
                    (previous_share.timestamp + net.SHARE_PERIOD) +
                    (net.SHARE_PERIOD - 1),
                )) if previous_share is not None else desired_timestamp,
            new_transaction_hashes=new_transaction_hashes,
            transaction_hash_refs=transaction_hash_refs,
            absheight=((previous_share.absheight
                        if previous_share is not None else 0) + 1) % 2**32,
            abswork=(
                (previous_share.abswork if previous_share is not None else 0) +
                axe_data.target_to_average_attempts(bits.target)) % 2**128,
        )

        gentx = dict(
            version=1,
            tx_ins=[
                dict(
                    previous_output=None,
                    sequence=None,
                    script=share_data['coinbase'],
                )
            ],
            tx_outs=worker_tx + payments_tx + donation_tx + [
                dict(
                    value=0,
                    script='\x6a\x28' +
                    cls.get_ref_hash(net, share_info, ref_merkle_link) +
                    pack.IntType(64).pack(last_txout_nonce),
                )
            ],
            lock_time=0,
        )

        def get_share(header, last_txout_nonce=last_txout_nonce):
            min_header = dict(header)
            del min_header['merkle_root']
            share = cls(
                net, None,
                dict(
                    min_header=min_header,
                    share_info=share_info,
                    ref_merkle_link=dict(branch=[], index=0),
                    last_txout_nonce=last_txout_nonce,
                    hash_link=prefix_to_hash_link(
                        axe_data.tx_type.pack(gentx)[:-32 - 8 - 4],
                        cls.gentx_before_refhash),
                    merkle_link=axe_data.calculate_merkle_link(
                        [None] + other_transaction_hashes, 0),
                ))
            assert share.header == header  # checks merkle_root
            return share

        return share_info, gentx, other_transaction_hashes, get_share
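
A closing note on the payout split in generate_transaction: roughly 98% of worker_payout is shared by weight, 2% goes to this share's finder, and the donation output absorbs the donation weight plus any floor-division dust, so the outputs always sum exactly to worker_payout. A small self-contained sketch of that arithmetic; split_payout and the scripts and weights in the example call are made-up placeholders, not real p2pool values:

def split_payout(worker_payout, weights, donation_weight, finder_script,
                 donation_script):
    # Mirror of the amounts calculation above: total_weight includes the
    # donation weight, 49/50 (98%) is shared by weight with floor division,
    # 2% goes to the finder, and the remainder goes to the donation script.
    total_weight = sum(weights.values()) + donation_weight
    amounts = dict((script, worker_payout * (49 * w) // (50 * total_weight))
                   for script, w in weights.items())
    amounts[finder_script] = amounts.get(finder_script, 0) + worker_payout // 50
    amounts[donation_script] = (amounts.get(donation_script, 0) +
                                worker_payout - sum(amounts.values()))
    assert sum(amounts.values()) == worker_payout
    return amounts


print(split_payout(5003880250, {'scriptA': 3, 'scriptB': 1}, 1,
                   'scriptA', 'donation'))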