Example 1
 def get_dynamic_types(cls, net):
     if cls.cached_types is not None:
         return cls.cached_types
     t = dict(share_info_type=None, share_type=None, ref_type=None)
     segwit_data = ('segwit_data', pack.PossiblyNoneType(dict(txid_merkle_link=dict(branch=[], index=0), wtxid_merkle_root=2**256-1), pack.ComposedType([
         ('txid_merkle_link', pack.ComposedType([
             ('branch', pack.ListType(pack.IntType(256))),
             ('index', pack.IntType(0)), # it will always be 0
         ])),
         ('wtxid_merkle_root', pack.IntType(256))
     ])))
     t['share_info_type'] = pack.ComposedType([
         ('share_data', pack.ComposedType([
             ('previous_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
             ('coinbase', pack.VarStrType()),
             ('nonce', pack.IntType(32)),
             ('pubkey_hash', pack.IntType(160)),
             ('subsidy', pack.IntType(64)),
             ('donation', pack.IntType(16)),
             ('stale_info', pack.EnumType(pack.IntType(8), dict((k, {0: None, 253: 'orphan', 254: 'doa'}.get(k, 'unk%i' % (k,))) for k in xrange(256)))),
             ('desired_version', pack.VarIntType()),
         ]))] + ([segwit_data] if is_segwit_activated(cls.VERSION, net) else []) + [
         ('new_transaction_hashes', pack.ListType(pack.IntType(256))),
         ('transaction_hash_refs', pack.ListType(pack.VarIntType(), 2)), # pairs of share_count, tx_count
         ('far_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
         ('max_bits', bitcoin_data.FloatingIntegerType()),
         ('bits', bitcoin_data.FloatingIntegerType()),
         ('timestamp', pack.IntType(32)),
         ('absheight', pack.IntType(32)),
         ('abswork', pack.IntType(128)),
     ])
     t['share_type'] = pack.ComposedType([
         ('min_header', cls.small_block_header_type),
         ('share_info', t['share_info_type']),
         ('ref_merkle_link', pack.ComposedType([
             ('branch', pack.ListType(pack.IntType(256))),
             ('index', pack.IntType(0)),
         ])),
         ('last_txout_nonce', pack.IntType(64)),
         ('hash_link', hash_link_type),
         ('merkle_link', pack.ComposedType([
             ('branch', pack.ListType(pack.IntType(256))),
             ('index', pack.IntType(0)), # it will always be 0
         ])),
     ])
     t['ref_type'] = pack.ComposedType([
         ('identifier', pack.FixedStrType(64//8)),
         ('share_info', t['share_info_type']),
     ])
     cls.cached_types = t
     return t
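
The method in Example 1 builds the pack types lazily because they depend on the network (segwit activation) and on the share VERSION, and memoizes the result on the class via cls.cached_types. A minimal sketch of that caching pattern, with a hypothetical build step standing in for the real pack.ComposedType construction:

class DynamicTypesSketch(object):
    cached_types = None  # class-level cache, shared by every call for this class

    @classmethod
    def get_dynamic_types(cls, net):
        # return the memoized dict if the types were already built
        if cls.cached_types is not None:
            return cls.cached_types
        # hypothetical stand-in for the pack.ComposedType construction above
        t = dict(share_info_type='share_info for %r' % (net,),
                 share_type='share for %r' % (net,),
                 ref_type='ref for %r' % (net,))
        cls.cached_types = t
        return t

assert DynamicTypesSketch.get_dynamic_types('mainnet') is DynamicTypesSketch.get_dynamic_types('mainnet')
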
Example 2
class Protocol(p2protocol.Protocol):
    def __init__(self, net):
        p2protocol.Protocol.__init__(self,
                                     net.P2P_PREFIX,
                                     1000000,
                                     ignore_trailing_payload=True)

    def connectionMade(self):
        self.send_version(
            version=70003,
            services=1,
            time=int(time.time()),
            addr_to=dict(
                services=1,
                address=self.transport.getPeer().host,
                port=self.transport.getPeer().port,
            ),
            addr_from=dict(
                services=1,
                address=self.transport.getHost().host,
                port=self.transport.getHost().port,
            ),
            nonce=random.randrange(2**64),
            sub_version_num='/P2Pool:%s/' % (p2pool.__version__, ),
            start_height=0,
        )

    message_version = pack.ComposedType([
        ('version', pack.IntType(32)),
        ('services', pack.IntType(64)),
        ('time', pack.IntType(64)),
        ('addr_to', bitcoin_data.address_type),
        ('addr_from', bitcoin_data.address_type),
        ('nonce', pack.IntType(64)),
        ('sub_version_num', pack.VarStrType()),
        ('start_height', pack.IntType(32)),
    ])

    def handle_version(self, version, services, time, addr_to, addr_from,
                       nonce, sub_version_num, start_height):
        self.send_verack()

    message_verack = pack.ComposedType([])

    def handle_verack(self):
        self.get_block = deferral.ReplyMatcher(lambda hash: self.send_getdata(
            requests=[dict(type='block', hash=hash)]))
        self.get_block_header = deferral.ReplyMatcher(
            lambda hash: self.send_getheaders(version=1, have=[], last=hash))

        if hasattr(self.factory, 'resetDelay'):
            self.factory.resetDelay()
        if hasattr(self.factory, 'gotConnection'):
            self.factory.gotConnection(self)

        self.pinger = deferral.RobustLoopingCall(self.send_ping, nonce=1234)
        self.pinger.start(30)

    message_inv = pack.ComposedType([
        ('invs',
         pack.ListType(
             pack.ComposedType([
                 ('type', pack.EnumType(pack.IntType(32), {
                     1: 'tx',
                     2: 'block'
                 })),
                 ('hash', pack.IntType(256)),
             ]))),
    ])

    def handle_inv(self, invs):
        for inv in invs:
            if inv['type'] == 'tx':
                self.send_getdata(requests=[inv])
            elif inv['type'] == 'block':
                self.factory.new_block.happened(inv['hash'])
            else:
                print 'Unknown inv type', inv

    message_getdata = pack.ComposedType([
        ('requests',
         pack.ListType(
             pack.ComposedType([
                 ('type', pack.EnumType(pack.IntType(32), {
                     1: 'tx',
                     2: 'block'
                 })),
                 ('hash', pack.IntType(256)),
             ]))),
    ])
    message_getblocks = pack.ComposedType([
        ('version', pack.IntType(32)),
        ('have', pack.ListType(pack.IntType(256))),
        ('last', pack.PossiblyNoneType(0, pack.IntType(256))),
    ])
    message_getheaders = pack.ComposedType([
        ('version', pack.IntType(32)),
        ('have', pack.ListType(pack.IntType(256))),
        ('last', pack.PossiblyNoneType(0, pack.IntType(256))),
    ])
    message_getaddr = pack.ComposedType([])

    message_addr = pack.ComposedType([
        ('addrs',
         pack.ListType(
             pack.ComposedType([
                 ('timestamp', pack.IntType(32)),
                 ('address', bitcoin_data.address_type),
             ]))),
    ])

    def handle_addr(self, addrs):
        for addr in addrs:
            pass

    message_tx = pack.ComposedType([
        ('tx', bitcoin_data.tx_type),
    ])

    def handle_tx(self, tx):
        self.factory.new_tx.happened(tx)

    message_block = pack.ComposedType([
        ('block', bitcoin_data.block_type),
    ])

    def handle_block(self, block):
        block_hash = bitcoin_data.hash256(
            bitcoin_data.block_header_type.pack(block['header']))
        self.get_block.got_response(block_hash, block)
        self.get_block_header.got_response(block_hash, block['header'])

    message_headers = pack.ComposedType([
        ('headers', pack.ListType(bitcoin_data.block_type)),
    ])

    def handle_headers(self, headers):
        for header in headers:
            header = header['header']
            self.get_block_header.got_response(
                bitcoin_data.hash256(
                    bitcoin_data.block_header_type.pack(header)), header)
        self.factory.new_headers.happened(
            [header['header'] for header in headers])

    message_ping = pack.ComposedType([
        ('nonce', pack.IntType(64)),
    ])

    def handle_ping(self, nonce):
        self.send_pong(nonce=nonce)

    message_pong = pack.ComposedType([
        ('nonce', pack.IntType(64)),
    ])

    def handle_pong(self, nonce):
        pass

    message_alert = pack.ComposedType([
        ('message', pack.VarStrType()),
        ('signature', pack.VarStrType()),
    ])

    def handle_alert(self, message, signature):
        pass  # print 'ALERT:', (message, signature)

    def connectionLost(self, reason):
        if hasattr(self.factory, 'gotConnection'):
            self.factory.gotConnection(None)
        if hasattr(self, 'pinger'):
            self.pinger.stop()
        if p2pool.DEBUG:
            print >> sys.stderr, 'Bitcoin connection lost. Reason:', reason.getErrorMessage()
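
Example 2 is the Bitcoin P2P client protocol: connectionMade sends a version message, the peer's version is answered with verack, and only once the peer's verack arrives are the block/header ReplyMatchers installed and the 30-second ping loop started. A minimal sketch of that handshake ordering, using hypothetical stubs instead of Twisted and p2protocol:

class HandshakeSketch(object):
    def __init__(self):
        self.sent = []        # messages this side has sent, in order
        self.ready = False    # becomes True once the peer's verack arrives

    def connectionMade(self):
        self.sent.append(('version', dict(version=70003, start_height=0)))

    def handle_version(self, **fields):
        # the real handler just replies with verack
        self.sent.append(('verack', {}))

    def handle_verack(self):
        # the real handler installs ReplyMatchers and starts the pinger here
        self.ready = True

proto = HandshakeSketch()
proto.connectionMade()                 # we initiate with our version
proto.handle_version(version=70003)    # peer's version arrives
proto.handle_verack()                  # peer's verack arrives
assert [name for name, _ in proto.sent] == ['version', 'verack'] and proto.ready
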
Example 3
address_type = pack.ComposedType([
    ('services', pack.IntType(64)),
    ('address', pack.IPV6AddressType()),
    ('port', pack.IntType(16, 'big')),
])

tx_type = pack.ComposedType([
    ('version', pack.IntType(32)),
    ('tx_ins',
     pack.ListType(
         pack.ComposedType([
             ('previous_output',
              pack.PossiblyNoneType(
                  dict(hash=0, index=2**32 - 1),
                  pack.ComposedType([
                      ('hash', pack.IntType(256)),
                      ('index', pack.IntType(32)),
                  ]))),
             ('script', pack.VarStrType()),
             ('sequence', pack.PossiblyNoneType(2**32 - 1, pack.IntType(32))),
         ]))),
    ('tx_outs',
     pack.ListType(
         pack.ComposedType([
             ('value', pack.IntType(64)),
             ('script', pack.VarStrType()),
         ]))),
    ('lock_time', pack.IntType(32)),
])

vote_type = pack.ComposedType([
Example 4
class Share(object):
    VERSION = 13
    VOTING_VERSION = 13
    SUCCESSOR = None
    
    small_block_header_type = pack.ComposedType([
        ('version', pack.VarIntType()),
        ('previous_block', pack.PossiblyNoneType(0, pack.IntType(256))),
        ('timestamp', pack.IntType(32)),
        ('bits', bitcoin_data.FloatingIntegerType()),
        ('nonce', pack.IntType(32)),
    ])
    
    share_info_type = pack.ComposedType([
        ('share_data', pack.ComposedType([
            ('previous_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
            ('coinbase', pack.VarStrType()),
            ('nonce', pack.IntType(32)),
            ('pubkey_hash', pack.IntType(160)),
            ('subsidy', pack.IntType(64)),
            ('donation', pack.IntType(16)),
            ('stale_info', pack.EnumType(pack.IntType(8), dict((k, {0: None, 253: 'orphan', 254: 'doa'}.get(k, 'unk%i' % (k,))) for k in xrange(256)))),
            ('desired_version', pack.VarIntType()),
        ])),
        ('new_transaction_hashes', pack.ListType(pack.IntType(256))),
        ('transaction_hash_refs', pack.ListType(pack.VarIntType(), 2)), # pairs of share_count, tx_count
        ('far_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
        ('max_bits', bitcoin_data.FloatingIntegerType()),
        ('bits', bitcoin_data.FloatingIntegerType()),
        ('timestamp', pack.IntType(32)),
        ('absheight', pack.IntType(32)),
        ('abswork', pack.IntType(128)),
    ])
    
    share_type = pack.ComposedType([
        ('min_header', small_block_header_type),
        ('share_info', share_info_type),
        ('ref_merkle_link', pack.ComposedType([
            ('branch', pack.ListType(pack.IntType(256))),
            ('index', pack.IntType(0)),
        ])),
        ('last_txout_nonce', pack.IntType(64)),
        ('hash_link', hash_link_type),
        ('merkle_link', pack.ComposedType([
            ('branch', pack.ListType(pack.IntType(256))),
            ('index', pack.IntType(0)), # it will always be 0
        ])),
    ])
    
    ref_type = pack.ComposedType([
        ('identifier', pack.FixedStrType(64//8)),
        ('share_info', share_info_type),
    ])
    
    gentx_before_refhash = pack.VarStrType().pack(DONATION_SCRIPT) + pack.IntType(64).pack(0) + pack.VarStrType().pack('\x6a\x28' + pack.IntType(256).pack(0) + pack.IntType(64).pack(0))[:3]
    
    @classmethod
    def generate_transaction(cls, tracker, share_data, block_target, desired_timestamp, desired_target, ref_merkle_link, desired_other_transaction_hashes_and_fees, net, known_txs=None, last_txout_nonce=0, base_subsidy=None):
        previous_share = tracker.items[share_data['previous_share_hash']] if share_data['previous_share_hash'] is not None else None
        
        height, last = tracker.get_height_and_last(share_data['previous_share_hash'])
        assert height >= net.REAL_CHAIN_LENGTH or last is None
        if height < net.TARGET_LOOKBEHIND:
            pre_target3 = net.MAX_TARGET
        else:
            attempts_per_second = get_pool_attempts_per_second(tracker, share_data['previous_share_hash'], net.TARGET_LOOKBEHIND, min_work=True, integer=True)
            pre_target = 2**256//(net.SHARE_PERIOD*attempts_per_second) - 1 if attempts_per_second else 2**256-1
            pre_target2 = math.clip(pre_target, (previous_share.max_target*9//10, previous_share.max_target*11//10))
            pre_target3 = math.clip(pre_target2, (net.MIN_TARGET, net.MAX_TARGET))
        max_bits = bitcoin_data.FloatingInteger.from_target_upper_bound(pre_target3)
        bits = bitcoin_data.FloatingInteger.from_target_upper_bound(math.clip(desired_target, (pre_target3//30, pre_target3)))
        
        new_transaction_hashes = []
        new_transaction_size = 0
        transaction_hash_refs = []
        other_transaction_hashes = []
        
        past_shares = list(tracker.get_chain(share_data['previous_share_hash'], min(height, 100)))
        tx_hash_to_this = {}
        for i, share in enumerate(past_shares):
            for j, tx_hash in enumerate(share.new_transaction_hashes):
                if tx_hash not in tx_hash_to_this:
                    tx_hash_to_this[tx_hash] = [1+i, j] # share_count, tx_count
        for tx_hash, fee in desired_other_transaction_hashes_and_fees:
            if tx_hash in tx_hash_to_this:
                this = tx_hash_to_this[tx_hash]
            else:
                if known_txs is not None:
                    this_size = bitcoin_data.tx_type.packed_size(known_txs[tx_hash])
                    if new_transaction_size + this_size > 50000: # only allow 50 kB of new txns/share
                        break
                    new_transaction_size += this_size
                new_transaction_hashes.append(tx_hash)
                this = [0, len(new_transaction_hashes)-1]
            transaction_hash_refs.extend(this)
            other_transaction_hashes.append(tx_hash)
        
        included_transactions = set(other_transaction_hashes)
        removed_fees = [fee for tx_hash, fee in desired_other_transaction_hashes_and_fees if tx_hash not in included_transactions]
        definite_fees = sum(0 if fee is None else fee for tx_hash, fee in desired_other_transaction_hashes_and_fees if tx_hash in included_transactions)
        if None not in removed_fees:
            share_data = dict(share_data, subsidy=share_data['subsidy'] - sum(removed_fees))
        else:
            assert base_subsidy is not None
            share_data = dict(share_data, subsidy=base_subsidy + definite_fees)
        
        weights, total_weight, donation_weight = tracker.get_cumulative_weights(previous_share.share_data['previous_share_hash'] if previous_share is not None else None,
            min(height, net.REAL_CHAIN_LENGTH-1),
            65535*net.SPREAD*bitcoin_data.target_to_average_attempts(block_target),
        )
        assert total_weight == sum(weights.itervalues()) + donation_weight, (total_weight, sum(weights.itervalues()) + donation_weight)
        
        amounts = dict((script, share_data['subsidy']*(199*weight)//(200*total_weight)) for script, weight in weights.iteritems()) # 99.5% goes according to weights prior to this share
        this_script = bitcoin_data.pubkey_hash_to_script2(share_data['pubkey_hash'])
        amounts[this_script] = amounts.get(this_script, 0) + share_data['subsidy']//200 # 0.5% goes to block finder
        amounts[DONATION_SCRIPT] = amounts.get(DONATION_SCRIPT, 0) + share_data['subsidy'] - sum(amounts.itervalues()) # all that's left over is the donation weight and some extra satoshis due to rounding
        
        if sum(amounts.itervalues()) != share_data['subsidy'] or any(x < 0 for x in amounts.itervalues()):
            raise ValueError()
        
        dests = sorted(amounts.iterkeys(), key=lambda script: (script == DONATION_SCRIPT, amounts[script], script))[-4000:] # block length limit, unlikely to ever be hit
        
        share_info = dict(
            share_data=share_data,
            far_share_hash=None if last is None and height < 99 else tracker.get_nth_parent_hash(share_data['previous_share_hash'], 99),
            max_bits=max_bits,
            bits=bits,
            timestamp=math.clip(desired_timestamp, (
                (previous_share.timestamp + net.SHARE_PERIOD) - (net.SHARE_PERIOD - 1), # = previous_share.timestamp + 1
                (previous_share.timestamp + net.SHARE_PERIOD) + (net.SHARE_PERIOD - 1),
            )) if previous_share is not None else desired_timestamp,
            new_transaction_hashes=new_transaction_hashes,
            transaction_hash_refs=transaction_hash_refs,
            absheight=((previous_share.absheight if previous_share is not None else 0) + 1) % 2**32,
            abswork=((previous_share.abswork if previous_share is not None else 0) + bitcoin_data.target_to_average_attempts(bits.target)) % 2**128,
        )
        
        gentx = dict(
            version=1,
            tx_ins=[dict(
                previous_output=None,
                sequence=None,
                script=share_data['coinbase'],
            )],
            tx_outs=[dict(value=amounts[script], script=script) for script in dests if amounts[script] or script == DONATION_SCRIPT] + [dict(
                value=0,
                script='\x6a\x28' + cls.get_ref_hash(net, share_info, ref_merkle_link) + pack.IntType(64).pack(last_txout_nonce),
            )],
            lock_time=0,
        )
        
        def get_share(header, last_txout_nonce=last_txout_nonce):
            min_header = dict(header); del min_header['merkle_root']
            share = cls(net, None, dict(
                min_header=min_header,
                share_info=share_info,
                ref_merkle_link=dict(branch=[], index=0),
                last_txout_nonce=last_txout_nonce,
                hash_link=prefix_to_hash_link(bitcoin_data.tx_type.pack(gentx)[:-32-8-4], cls.gentx_before_refhash),
                merkle_link=bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0),
            ))
            assert share.header == header # checks merkle_root
            return share
        
        return share_info, gentx, other_transaction_hashes, get_share
    
    @classmethod
    def get_ref_hash(cls, net, share_info, ref_merkle_link):
        return pack.IntType(256).pack(bitcoin_data.check_merkle_link(bitcoin_data.hash256(cls.ref_type.pack(dict(
            identifier=net.IDENTIFIER,
            share_info=share_info,
        ))), ref_merkle_link))
    
    __slots__ = 'net peer_addr contents min_header share_info hash_link merkle_link hash share_data max_target target timestamp previous_hash new_script desired_version gentx_hash header pow_hash header_hash new_transaction_hashes time_seen absheight abswork'.split(' ')
    
    def __init__(self, net, peer_addr, contents):
        self.net = net
        self.peer_addr = peer_addr
        self.contents = contents
        
        self.min_header = contents['min_header']
        self.share_info = contents['share_info']
        self.hash_link = contents['hash_link']
        self.merkle_link = contents['merkle_link']
        
        if not (2 <= len(self.share_info['share_data']['coinbase']) <= 100):
            raise ValueError('''bad coinbase size! %i bytes''' % (len(self.share_info['share_data']['coinbase']),))
        
        if len(self.merkle_link['branch']) > 16:
            raise ValueError('merkle branch too long!')
        
        assert not self.hash_link['extra_data'], repr(self.hash_link['extra_data'])
        
        self.share_data = self.share_info['share_data']
        self.max_target = self.share_info['max_bits'].target
        self.target = self.share_info['bits'].target
        self.timestamp = self.share_info['timestamp']
        self.previous_hash = self.share_data['previous_share_hash']
        self.new_script = bitcoin_data.pubkey_hash_to_script2(self.share_data['pubkey_hash'])
        self.desired_version = self.share_data['desired_version']
        self.absheight = self.share_info['absheight']
        self.abswork = self.share_info['abswork']
        
        n = set()
        for share_count, tx_count in self.iter_transaction_hash_refs():
            assert share_count < 110
            if share_count == 0:
                n.add(tx_count)
        assert n == set(range(len(self.share_info['new_transaction_hashes'])))
        
        self.gentx_hash = check_hash_link(
            self.hash_link,
            self.get_ref_hash(net, self.share_info, contents['ref_merkle_link']) + pack.IntType(64).pack(self.contents['last_txout_nonce']) + pack.IntType(32).pack(0),
            self.gentx_before_refhash,
        )
        merkle_root = bitcoin_data.check_merkle_link(self.gentx_hash, self.merkle_link)
        self.header = dict(self.min_header, merkle_root=merkle_root)
        self.pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(self.header))
        self.hash = self.header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(self.header))
        
        if self.target > net.MAX_TARGET:
            from p2pool import p2p
            raise p2p.PeerMisbehavingError('share target invalid')
        
        if self.pow_hash > self.target:
            from p2pool import p2p
            raise p2p.PeerMisbehavingError('share PoW invalid')
        
        self.new_transaction_hashes = self.share_info['new_transaction_hashes']
        
        # XXX eww
        self.time_seen = time.time()
    
    def __repr__(self):
        return 'Share' + repr((self.net, self.peer_addr, self.contents))
    
    def as_share(self):
        return dict(type=self.VERSION, contents=self.share_type.pack(self.contents))
    
    def iter_transaction_hash_refs(self):
        return zip(self.share_info['transaction_hash_refs'][::2], self.share_info['transaction_hash_refs'][1::2])
    
    def check(self, tracker):
        from p2pool import p2p
        if self.share_data['previous_share_hash'] is not None:
            previous_share = tracker.items[self.share_data['previous_share_hash']]
            if type(self) is type(previous_share):
                pass
            elif type(self) is type(previous_share).SUCCESSOR:
                if tracker.get_height(previous_share.hash) < self.net.CHAIN_LENGTH:
                    from p2pool import p2p
                    raise p2p.PeerMisbehavingError('switch without enough history')
                
                # switch only valid if 85% of hashes in [self.net.CHAIN_LENGTH*9//10, self.net.CHAIN_LENGTH] for new version
                counts = get_desired_version_counts(tracker,
                    tracker.get_nth_parent_hash(previous_share.hash, self.net.CHAIN_LENGTH*9//10), self.net.CHAIN_LENGTH//10)
                if counts.get(self.VERSION, 0) < sum(counts.itervalues())*85//100:
                    raise p2p.PeerMisbehavingError('switch without enough hash power upgraded')
            else:
                raise p2p.PeerMisbehavingError('''%s can't follow %s''' % (type(self).__name__, type(previous_share).__name__))
        
        other_tx_hashes = [tracker.items[tracker.get_nth_parent_hash(self.hash, share_count)].share_info['new_transaction_hashes'][tx_count] for share_count, tx_count in self.iter_transaction_hash_refs()]
        
        share_info, gentx, other_tx_hashes2, get_share = self.generate_transaction(tracker, self.share_info['share_data'], self.header['bits'].target, self.share_info['timestamp'], self.share_info['bits'].target, self.contents['ref_merkle_link'], [(h, None) for h in other_tx_hashes], self.net, last_txout_nonce=self.contents['last_txout_nonce'])
        assert other_tx_hashes2 == other_tx_hashes
        if share_info != self.share_info:
            raise ValueError('share_info invalid')
        if bitcoin_data.hash256(bitcoin_data.tx_type.pack(gentx)) != self.gentx_hash:
            raise ValueError('''gentx doesn't match hash_link''')
        
        if bitcoin_data.calculate_merkle_link([None] + other_tx_hashes, 0) != self.merkle_link:
            raise ValueError('merkle_link and other_tx_hashes do not match')
        
        return gentx # only used by as_block
    
    def get_other_tx_hashes(self, tracker):
        parents_needed = max(share_count for share_count, tx_count in self.iter_transaction_hash_refs()) if self.share_info['transaction_hash_refs'] else 0
        parents = tracker.get_height(self.hash) - 1
        if parents < parents_needed:
            return None
        last_shares = list(tracker.get_chain(self.hash, parents_needed + 1))
        return [last_shares[share_count].share_info['new_transaction_hashes'][tx_count] for share_count, tx_count in self.iter_transaction_hash_refs()]
    
    def _get_other_txs(self, tracker, known_txs):
        other_tx_hashes = self.get_other_tx_hashes(tracker)
        if other_tx_hashes is None:
            return None # not all parents present
        
        if not all(tx_hash in known_txs for tx_hash in other_tx_hashes):
            return None # not all txs present
        
        return [known_txs[tx_hash] for tx_hash in other_tx_hashes]
    
    def should_punish_reason(self, previous_block, bits, tracker, known_txs):
        if (self.header['previous_block'], self.header['bits']) != (previous_block, bits) and self.header_hash != previous_block and self.peer_addr is not None:
            return True, 'Block-stale detected! height(%x) < height(%x) or %08x != %08x' % (self.header['previous_block'], previous_block, self.header['bits'].bits, bits.bits)
        
        if self.pow_hash <= self.header['bits'].target:
            return -1, 'block solution'
        
        other_txs = self._get_other_txs(tracker, known_txs)
        if other_txs is None:
            pass
        else:
            all_txs_size = sum(bitcoin_data.tx_type.packed_size(tx) for tx in other_txs)
            if all_txs_size > 1000000:
                return True, 'txs over block size limit'
            
            new_txs_size = sum(bitcoin_data.tx_type.packed_size(known_txs[tx_hash]) for tx_hash in self.share_info['new_transaction_hashes'])
            if new_txs_size > 50000:
                return True, 'new txs over limit'
        
        return False, None
    
    def as_block(self, tracker, known_txs):
        other_txs = self._get_other_txs(tracker, known_txs)
        if other_txs is None:
            return None # not all txs present
        return dict(header=self.header, txs=[self.check(tracker)] + other_txs)
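
The payout logic in Example 4's generate_transaction splits the subsidy as integer satoshis: 99.5% in proportion to the weights accumulated before this share, 0.5% to the share finder, and whatever is left (the donation weight plus rounding dust) to DONATION_SCRIPT, so the outputs always sum exactly to the subsidy. A worked sketch with hypothetical numbers (the donation weight itself is omitted here for brevity):

subsidy = 625000000                         # hypothetical subsidy in satoshis
weights = {'miner_a': 2, 'miner_b': 1}      # hypothetical cumulative weights
total_weight = sum(weights.values())
DONATION = 'donation_script'
finder = 'miner_a'                          # script of this share's finder

# 99.5% of the subsidy is split in proportion to the accumulated weights
amounts = dict((s, subsidy * (199 * w) // (200 * total_weight))
               for s, w in weights.items())
# 0.5% goes to the share finder
amounts[finder] = amounts.get(finder, 0) + subsidy // 200
# the donation script absorbs the remainder (here just 1 satoshi of rounding dust)
amounts[DONATION] = amounts.get(DONATION, 0) + subsidy - sum(amounts.values())

assert sum(amounts.values()) == subsidy
assert all(v >= 0 for v in amounts.values())
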
Example 5
                ])
            else:
                if p2pool.DEBUG:
                    print 'Advertising for incoming connections'
                # Ask peer to advertise what it believes our IP address to be
                self.send_addrme(port=port)

    message_version = pack.ComposedType([
        ('version', pack.IntType(32)),
        ('services', pack.IntType(64)),
        ('addr_to', bitcoin_data.address_type),
        ('addr_from', bitcoin_data.address_type),
        ('nonce', pack.IntType(64)),
        ('sub_version', pack.VarStrType()),
        ('mode', pack.IntType(32)),  # always 1 for legacy compatibility
        ('best_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
    ])

    def handle_version(self, version, services, addr_to, addr_from, nonce,
                       sub_version, mode, best_share_hash):
        if self.other_version is not None:
            raise PeerMisbehavingError('more than one version message')
        if version < 1300:
            raise PeerMisbehavingError('peer too old')

        self.other_version = version
        self.other_sub_version = sub_version[:512]
        self.other_services = services

        if nonce == self.node.nonce:
            raise PeerMisbehavingError('was connected to self')
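
The handle_version checks in Example 5 gate the rest of the P2Pool peer protocol: a second version message, a peer older than protocol version 1300, or a nonce equal to our own (a connection back to ourselves) are all treated as misbehaviour. A minimal sketch of the same validation, with a local stand-in for PeerMisbehavingError:

class PeerMisbehavingError(Exception):
    pass

def validate_version(other_version_seen, version, nonce, own_nonce, min_version=1300):
    # mirrors the checks in handle_version above (sketch, not the real method)
    if other_version_seen is not None:
        raise PeerMisbehavingError('more than one version message')
    if version < min_version:
        raise PeerMisbehavingError('peer too old')
    if nonce == own_nonce:
        raise PeerMisbehavingError('was connected to self')
    return version

assert validate_version(None, 1300, nonce=1, own_nonce=2) == 1300
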
Example 6
class Protocol(p2protocol.Protocol):
    def __init__(self, net):
        p2protocol.Protocol.__init__(self,
                                     net.P2P_PREFIX,
                                     1000000,
                                     ignore_trailing_payload=True)
        self.net = net

    def connectionMade(self):
        self.send_version(
            version=70206,
            services=1,
            time=int(time.time()),
            addr_to=dict(
                services=1,
                address=self.transport.getPeer().host,
                port=self.transport.getPeer().port,
            ),
            addr_from=dict(
                services=1,
                address=self.transport.getHost().host,
                port=self.transport.getHost().port,
            ),
            nonce=random.randrange(2**64),
            sub_version_num='/P2Pool:%s/' % (p2pool.__version__, ),
            start_height=0,
        )

    message_version = pack.ComposedType([
        ('version', pack.IntType(32)),
        ('services', pack.IntType(64)),
        ('time', pack.IntType(64)),
        ('addr_to', dash_data.address_type),
        ('addr_from', dash_data.address_type),
        ('nonce', pack.IntType(64)),
        ('sub_version_num', pack.VarStrType()),
        ('start_height', pack.IntType(32)),
    ])

    def handle_version(self, version, services, time, addr_to, addr_from,
                       nonce, sub_version_num, start_height):
        self.send_verack()

    message_verack = pack.ComposedType([])

    def handle_verack(self):
        self.get_block = deferral.ReplyMatcher(lambda hash: self.send_getdata(
            requests=[dict(type='block', hash=hash)]))
        self.get_block_header = deferral.ReplyMatcher(
            lambda hash: self.send_getheaders(version=1, have=[], last=hash))

        if hasattr(self.factory, 'resetDelay'):
            self.factory.resetDelay()
        if hasattr(self.factory, 'gotConnection'):
            self.factory.gotConnection(self)

        self.pinger = deferral.RobustLoopingCall(self.send_ping, nonce=1234)
        self.pinger.start(30)

    # https://github.com/dashpay/dash/blob/v0.12.1.x/src/protocol.h#L338-L362
    message_inv = pack.ComposedType([
        ('invs',
         pack.ListType(
             pack.ComposedType([
                 ('type',
                  pack.EnumType(
                      pack.IntType(32), {
                          1: 'tx',
                          2: 'block',
                          3: 'filtered_block',
                          4: 'txlock_request',
                          5: 'txlock_vote',
                          6: 'spork',
                          7: 'masternode_winner',
                          8: 'masternode_scanning_error',
                          9: 'budget_vote',
                          10: 'budget_proposal',
                          11: 'budget_finalized',
                          12: 'budget_finalized_vote',
                          13: 'masternode_quorum',
                          14: 'masternode_announce',
                          15: 'masternode_ping',
                          16: 'dstx',
                          17: 'governance_object',
                          18: 'governance_object_vote',
                          19: 'masternode_verify'
                      })),
                 ('hash', pack.IntType(256)),
             ]))),
    ])

    def handle_inv(self, invs):
        for inv in invs:
            if inv['type'] == 'tx':
                self.send_getdata(requests=[inv])
            elif inv['type'] == 'block':
                self.factory.new_block.happened(inv['hash'])
            else:
                if p2pool.DEBUG:
                    print 'Unneeded inv type', inv

    message_getdata = pack.ComposedType([
        ('requests',
         pack.ListType(
             pack.ComposedType([
                 ('type', pack.EnumType(pack.IntType(32), {
                     1: 'tx',
                     2: 'block'
                 })),
                 ('hash', pack.IntType(256)),
             ]))),
    ])
    message_getblocks = pack.ComposedType([
        ('version', pack.IntType(32)),
        ('have', pack.ListType(pack.IntType(256))),
        ('last', pack.PossiblyNoneType(0, pack.IntType(256))),
    ])
    message_getheaders = pack.ComposedType([
        ('version', pack.IntType(32)),
        ('have', pack.ListType(pack.IntType(256))),
        ('last', pack.PossiblyNoneType(0, pack.IntType(256))),
    ])
    message_getaddr = pack.ComposedType([])

    message_addr = pack.ComposedType([
        ('addrs',
         pack.ListType(
             pack.ComposedType([
                 ('timestamp', pack.IntType(32)),
                 ('address', dash_data.address_type),
             ]))),
    ])

    def handle_addr(self, addrs):
        for addr in addrs:
            pass

    message_tx = pack.ComposedType([
        ('tx', dash_data.tx_type),
    ])

    def handle_tx(self, tx):
        self.factory.new_tx.happened(tx)

    message_block = pack.ComposedType([
        ('block', dash_data.block_type),
    ])

    def handle_block(self, block):
        block_hash = self.net.BLOCKHASH_FUNC(
            dash_data.block_header_type.pack(block['header']))
        self.get_block.got_response(block_hash, block)
        self.get_block_header.got_response(block_hash, block['header'])

    message_block_old = pack.ComposedType([
        ('block', dash_data.block_type_old),
    ])

    def handle_block_old(self, block):
        block_hash = self.net.BLOCKHASH_FUNC(
            dash_data.block_header_type.pack(block['header']))
        self.get_block.got_response(block_hash, block)
        self.get_block_header.got_response(block_hash, block['header'])

    message_headers = pack.ComposedType([
        ('headers', pack.ListType(dash_data.block_type_old)),
    ])

    def handle_headers(self, headers):
        for header in headers:
            header = header['header']
            self.get_block_header.got_response(
                self.net.BLOCKHASH_FUNC(
                    dash_data.block_header_type.pack(header)), header)
        self.factory.new_headers.happened(
            [header['header'] for header in headers])

    message_ping = pack.ComposedType([
        ('nonce', pack.IntType(64)),
    ])

    def handle_ping(self, nonce):
        self.send_pong(nonce=nonce)

    message_pong = pack.ComposedType([
        ('nonce', pack.IntType(64)),
    ])

    def handle_pong(self, nonce):
        pass

    message_alert = pack.ComposedType([
        ('message', pack.VarStrType()),
        ('signature', pack.VarStrType()),
    ])

    def handle_alert(self, message, signature):
        pass  # print 'ALERT:', (message, signature)

    def connectionLost(self, reason):
        if hasattr(self.factory, 'gotConnection'):
            self.factory.gotConnection(None)
        if hasattr(self, 'pinger'):
            self.pinger.stop()
        if p2pool.DEBUG:
            print >> sys.stderr, 'Dashd connection lost. Reason:', reason.getErrorMessage()
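
Example 6 is the same client protocol adapted to dashd; the notable difference is the wider inv type table, which handle_inv reduces to three cases: fetch transactions, signal new blocks, and ignore everything else. A small sketch of that dispatch using the same numeric codes:

DASH_INV_TYPES = {
    1: 'tx', 2: 'block', 3: 'filtered_block', 4: 'txlock_request',
    5: 'txlock_vote', 6: 'spork', 7: 'masternode_winner',
    8: 'masternode_scanning_error', 9: 'budget_vote', 10: 'budget_proposal',
    11: 'budget_finalized', 12: 'budget_finalized_vote', 13: 'masternode_quorum',
    14: 'masternode_announce', 15: 'masternode_ping', 16: 'dstx',
    17: 'governance_object', 18: 'governance_object_vote', 19: 'masternode_verify',
}

def dispatch_inv(type_code, hash_value):
    # resolve the numeric code the way pack.EnumType does, then mirror handle_inv
    name = DASH_INV_TYPES.get(type_code, 'unknown')
    if name == 'tx':
        return ('getdata', hash_value)
    elif name == 'block':
        return ('new_block', hash_value)
    return ('ignored', name)

assert dispatch_inv(2, 0xdeadbeef) == ('new_block', 0xdeadbeef)
assert dispatch_inv(6, 0) == ('ignored', 'spork')
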
Example 7
class Share(object):
    VERSION = 9
    SUCCESSOR = None

    other_txs = None

    small_block_header_type = pack.ComposedType([
        ('version', pack.VarIntType()),
        ('previous_block', pack.PossiblyNoneType(0, pack.IntType(256))),
        ('timestamp', pack.IntType(32)),
        ('bits', bitcoin_data.FloatingIntegerType()),
        ('nonce', pack.IntType(32)),
    ])

    share_info_type = pack.ComposedType([
        ('share_data',
         pack.ComposedType([
             ('previous_share_hash',
              pack.PossiblyNoneType(0, pack.IntType(256))),
             ('coinbase', pack.VarStrType()),
             ('nonce', pack.IntType(32)),
             ('script', pack.VarStrType()),
             ('subsidy', pack.IntType(64)),
             ('donation', pack.IntType(16)),
             ('stale_info',
              pack.EnumType(
                  pack.IntType(8),
                  dict((k, {
                      0: None,
                      253: 'orphan',
                      254: 'doa'
                  }.get(k, 'unk%i' % (k, ))) for k in xrange(256)))),
             ('desired_version', pack.VarIntType()),
         ])),
        ('new_transaction_hashes', pack.ListType(pack.IntType(256))),
        ('transaction_hash_refs',
         pack.ListType(pack.VarIntType(),
                       2)),  # pairs of share_count, tx_count
        ('far_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
        ('max_bits', bitcoin_data.FloatingIntegerType()),
        ('bits', bitcoin_data.FloatingIntegerType()),
        ('timestamp', pack.IntType(32)),
    ])

    share_type = pack.ComposedType([
        ('min_header', small_block_header_type),
        ('share_info', share_info_type),
        ('ref_merkle_link',
         pack.ComposedType([
             ('branch', pack.ListType(pack.IntType(256))),
             ('index', pack.IntType(0)),
         ])),
        ('last_txout_nonce', pack.IntType(32)),
        ('hash_link', hash_link_type),
        (
            'merkle_link',
            pack.ComposedType([
                ('branch', pack.ListType(pack.IntType(256))),
                ('index', pack.IntType(0)),  # it will always be 0
            ])),
    ])

    ref_type = pack.ComposedType([
        ('identifier', pack.FixedStrType(64 // 8)),
        ('share_info', share_info_type),
    ])

    gentx_before_refhash = pack.VarStrType(
    ).pack(DONATION_SCRIPT) + pack.IntType(64).pack(minout) + pack.VarStrType(
    ).pack('\x24' + pack.IntType(256).pack(0) + pack.IntType(32).pack(0))[:2]

    @classmethod
    def generate_transaction(cls,
                             tracker,
                             share_data,
                             block_target,
                             desired_timestamp,
                             desired_target,
                             ref_merkle_link,
                             desired_other_transaction_hashes_and_fees,
                             net,
                             known_txs=None,
                             last_txout_nonce=0,
                             base_subsidy=None):

        previous_share = tracker.items[
            share_data['previous_share_hash']] if share_data[
                'previous_share_hash'] is not None else None

        def get_coinbase_fee(share_data, outpointsnum):
            # calculate the necessary coinbase fee

            # coinbase usually seems like this:
            #
            # 01000000 - nVersion
            # 1a184351 - nTimestamp

            # 01 - Inputs num
            # 0000000000000000000000000000000000000000000000000000000000000000 - Input hash
            # ffffffff - Input index (-1)
            # 0a02732a062f503253482f - Scriptsig
            # ffffffff - nSequence

            # 15 - Outpoints num
            # (User outpoints, 44 bytes per each)
            # (Donation outpoint, 76 bytes)

            # P2Pool service outpoint (contains merkle link), 46 bytes
            #
            # 1027000000000000
            # 25
            # 2417cc2063b11fd5255c7e5605780de78163ffc698ed22856bff1a5d880c3c44e400000000

            # Giving users some time to upgrade
            coinbase_size = 50 + (
                1 + len(share_data['coinbase'])) + outpointsnum * 44 + 76 + 46

            # if coinbase size is greater than 1000 bytes, it should pay fee (0.01 per 1000 bytes)
            if coinbase_size > 1000:
                return int(ceil(coinbase_size / 1000.0) * minout)

            return 0

        if base_subsidy is None:
            base_subsidy = net.PARENT.SUBSIDY_FUNC(block_target)

        # current user payout script

        this_script = share_data['script']

        height, last = tracker.get_height_and_last(
            share_data['previous_share_hash'])

        assert height >= net.REAL_CHAIN_LENGTH or last is None
        if height < net.TARGET_LOOKBEHIND:
            pre_target3 = net.MAX_TARGET
        else:
            attempts_per_second = get_pool_attempts_per_second(
                tracker,
                share_data['previous_share_hash'],
                net.TARGET_LOOKBEHIND,
                min_work=True,
                integer=True)
            pre_target = 2**256 // (
                net.SHARE_PERIOD *
                attempts_per_second) - 1 if attempts_per_second else 2**256 - 1
            pre_target2 = math.clip(pre_target,
                                    (previous_share.max_target * 9 // 10,
                                     previous_share.max_target * 11 // 10))
            pre_target3 = math.clip(pre_target2,
                                    (net.MIN_TARGET, net.MAX_TARGET))
        max_bits = bitcoin_data.FloatingInteger.from_target_upper_bound(
            pre_target3)
        bits = bitcoin_data.FloatingInteger.from_target_upper_bound(
            math.clip(desired_target, (pre_target3 // 10, pre_target3)))

        if p2pool.DEBUG:
            print
            print "Share Info Bits Target (DATA)"
            print bitcoin_data.target_to_difficulty(bits.target)
            print

        new_transaction_hashes = []
        new_transaction_size = 0
        transaction_hash_refs = []
        other_transaction_hashes = []

        past_shares = list(
            tracker.get_chain(share_data['previous_share_hash'],
                              min(height, 100)))
        tx_hash_to_this = {}
        for i, share in enumerate(past_shares):
            for j, tx_hash in enumerate(share.new_transaction_hashes):
                if tx_hash not in tx_hash_to_this:
                    tx_hash_to_this[tx_hash] = [1 + i,
                                                j]  # share_count, tx_count
        for tx_hash, fee in desired_other_transaction_hashes_and_fees:
            if tx_hash in tx_hash_to_this:
                this = tx_hash_to_this[tx_hash]
            else:
                if known_txs is not None:
                    this_size = bitcoin_data.tx_type.packed_size(
                        known_txs[tx_hash])
                    if new_transaction_size + this_size > 50000:  # only allow 50 kB of new txns/share
                        break
                    new_transaction_size += this_size
                new_transaction_hashes.append(tx_hash)
                this = [0, len(new_transaction_hashes) - 1]
            transaction_hash_refs.extend(this)
            other_transaction_hashes.append(tx_hash)

        included_transactions = set(other_transaction_hashes)

        share_data = dict(share_data, subsidy=base_subsidy)

        raw_weights, total_weight, donation_weight = tracker.get_cumulative_weights(
            share_data['previous_share_hash'],
            min(height, net.REAL_CHAIN_LENGTH),
            65535 * net.SPREAD *
            bitcoin_data.target_to_average_attempts(block_target),
        )

        # calculate "raw" subsidy
        raw_subsidy = share_data['subsidy'] - 4 * minout - get_coinbase_fee(
            share_data,
            len(raw_weights) + 1)

        # calculate "raw" amounts
        raw_amounts = dict((script, raw_subsidy * weight // total_weight)
                           for script, weight in raw_weights.iteritems())

        total_remowed_weight = 0
        weights = {}

        # iterate over the list and filter out every weight that produces a payout of less than 0.01
        # this is necessary due to NVC/PPC protocol-level limitations on coinbase outpoint size
        for x in raw_amounts.keys():
            if raw_amounts[x] < minout and x not in [
                    this_script, DONATION_SCRIPT
            ]:
                total_remowed_weight = total_remowed_weight + raw_weights[x]
            else:
                weights[x] = raw_weights[x]

        total_weight = total_weight - total_remowed_weight
        assert total_weight == sum(weights.itervalues()) + donation_weight, (
            total_weight, sum(weights.itervalues()) + donation_weight)

        # base subsidy value calculated as:
        # [subsidy - (0.01 for donation + 0.01 for current user + 0.01 for p2pool outpoint) - netfee]
        my_subsidy = share_data['subsidy'] - 3 * minout - get_coinbase_fee(
            share_data,
            len(weights) + 1)

        # subsidy goes according to weights prior to this share
        amounts = dict((script, my_subsidy * weight // total_weight)
                       for script, weight in weights.iteritems())

        # all that's left over is the donation weight and some extra satoshis due to rounding
        amounts[DONATION_SCRIPT] = amounts.get(
            DONATION_SCRIPT, 0) + my_subsidy - sum(amounts.itervalues())

        if sum(amounts.itervalues()) != my_subsidy or any(
                x < 0 for x in amounts.itervalues()):
            raise ValueError()

        # add 0.01 coin to donation, to satisfy the protocol
        amounts[DONATION_SCRIPT] = amounts[DONATION_SCRIPT] + minout

        # add 0.01 to current user output, to satisfy the protocol
        amounts[this_script] = amounts.get(this_script, 0) + minout

        dests = sorted(
            amounts.iterkeys(),
            key=lambda script:
            (script == DONATION_SCRIPT, amounts[script], script))[
                -4000:]  # block length limit, unlikely to ever be hit

        share_info = dict(
            share_data=share_data,
            far_share_hash=None if last is None and height < 99 else
            tracker.get_nth_parent_hash(share_data['previous_share_hash'], 99),
            max_bits=max_bits,
            bits=bits,
            timestamp=desired_timestamp,  # needs a better solution
            #           timestamp=math.clip(desired_timestamp, (
            #               (previous_share.timestamp + net.SHARE_PERIOD) - (net.SHARE_PERIOD - 1), # = previous_share.timestamp + 1
            #               (previous_share.timestamp + net.SHARE_PERIOD) + (net.SHARE_PERIOD - 1),
            #           )) if previous_share is not None else desired_timestamp,
            new_transaction_hashes=new_transaction_hashes,
            transaction_hash_refs=transaction_hash_refs,
        )

        if p2pool.DEBUG:
            print
            print "Desired timestamp (DATA)"
            print desired_timestamp
            print time.time()
            print
            print "Prev Share timestamp (DATA)"
            print previous_share.timestamp
            print time.time()
            print
            print "Share info timestamp (DATA)"
            print share_info['timestamp']
            print time.time()
            print

        gentx = dict(
            version=1,
            # coinbase timestamp must be older than share/block timestamp
            # maybe there is a more elegant solution, but this hack works quite well for now
            timestamp=share_info['timestamp'],
            tx_ins=[
                dict(
                    previous_output=None,
                    sequence=None,
                    script=share_data['coinbase'],
                )
            ],
            tx_outs=[
                dict(value=amounts[script], script=script) for script in dests
                if amounts[script] or script == DONATION_SCRIPT
            ] + [
                dict(
                    # add 0.01 coin to service output, to satisfy the protocol
                    value=minout,
                    script='\x24' +
                    cls.get_ref_hash(net, share_info, ref_merkle_link) +
                    pack.IntType(32).pack(last_txout_nonce),
                )
            ],
            lock_time=0,
        )

        def get_share(header, last_txout_nonce=last_txout_nonce):
            min_header = dict(header)
            del min_header['merkle_root']
            share = cls(
                net, None,
                dict(
                    min_header=min_header,
                    share_info=share_info,
                    ref_merkle_link=dict(branch=[], index=0),
                    last_txout_nonce=last_txout_nonce,
                    hash_link=prefix_to_hash_link(
                        bitcoin_data.tx_type.pack(gentx)[:-32 - 4 - 4],
                        cls.gentx_before_refhash),
                    merkle_link=bitcoin_data.calculate_merkle_link(
                        [None] + other_transaction_hashes, 0),
                ))
            assert share.header == header  # checks merkle_root
            return share

        return share_info, gentx, other_transaction_hashes, get_share

    @classmethod
    def get_ref_hash(cls, net, share_info, ref_merkle_link):
        return pack.IntType(256).pack(
            bitcoin_data.check_merkle_link(
                bitcoin_data.hash256(
                    cls.ref_type.pack(
                        dict(
                            identifier=net.IDENTIFIER,
                            share_info=share_info,
                        ))), ref_merkle_link))

    __slots__ = 'net peer_addr contents min_header share_info hash_link merkle_link hash share_data max_target target timestamp previous_hash new_script desired_version gentx_hash header pow_hash header_hash new_transaction_hashes time_seen'.split(
        ' ')

    def __init__(self, net, peer_addr, contents):
        self.net = net
        self.peer_addr = peer_addr
        self.contents = contents

        self.min_header = contents['min_header']
        self.share_info = contents['share_info']
        self.hash_link = contents['hash_link']
        self.merkle_link = contents['merkle_link']

        if not (2 <= len(self.share_info['share_data']['coinbase']) <= 100):
            raise ValueError(
                '''bad coinbase size! %i bytes''' %
                (len(self.share_info['share_data']['coinbase']), ))

        if len(self.merkle_link['branch']) > 16:
            raise ValueError('merkle branch too long!')

        assert not self.hash_link['extra_data'], repr(
            self.hash_link['extra_data'])

        self.share_data = self.share_info['share_data']
        self.max_target = self.share_info['max_bits'].target
        self.target = self.share_info['bits'].target
        self.timestamp = self.share_info['timestamp']
        self.previous_hash = self.share_data['previous_share_hash']
        self.new_script = self.share_data['script']
        self.desired_version = self.share_data['desired_version']

        n = set()
        for share_count, tx_count in self.iter_transaction_hash_refs():
            assert share_count < 110
            if share_count == 0:
                n.add(tx_count)
        assert n == set(range(len(self.share_info['new_transaction_hashes'])))

        self.gentx_hash = check_hash_link(
            self.hash_link,
            self.get_ref_hash(net, self.share_info,
                              contents['ref_merkle_link']) +
            pack.IntType(32).pack(self.contents['last_txout_nonce']) +
            pack.IntType(32).pack(0),
            self.gentx_before_refhash,
        )
        merkle_root = bitcoin_data.check_merkle_link(self.gentx_hash,
                                                     self.merkle_link)
        self.header = dict(self.min_header, merkle_root=merkle_root)
        self.pow_hash = net.PARENT.POW_FUNC(
            bitcoin_data.block_header_type.pack(self.header))
        self.hash = self.header_hash = net.PARENT.BLOCKHASH_FUNC(
            bitcoin_data.block_header_type.pack(self.header))

        if self.target > net.MAX_TARGET:
            from p2pool import p2p
            raise p2p.PeerMisbehavingError('share target invalid')

        if self.pow_hash > self.target:
            from p2pool import p2p
            raise p2p.PeerMisbehavingError('share PoW invalid')

        self.new_transaction_hashes = self.share_info['new_transaction_hashes']

        # XXX eww
        self.time_seen = time.time()

    def __repr__(self):
        return 'Share' + repr((self.net, self.peer_addr, self.contents))

    def as_share(self):
        return dict(type=self.VERSION,
                    contents=self.share_type.pack(self.contents))

    def iter_transaction_hash_refs(self):
        return zip(self.share_info['transaction_hash_refs'][::2],
                   self.share_info['transaction_hash_refs'][1::2])

    def check(self, tracker):

        from p2pool import p2p
        if self.share_data['previous_share_hash'] is not None:
            previous_share = tracker.items[
                self.share_data['previous_share_hash']]
            if type(self) is type(previous_share):
                pass
            elif type(self) is type(previous_share).SUCCESSOR:
                if tracker.get_height(
                        previous_share.hash) < self.net.CHAIN_LENGTH:
                    from p2pool import p2p
                    raise p2p.PeerMisbehavingError(
                        'switch without enough history')

                # switch only valid if 85% of hashes in [self.net.CHAIN_LENGTH*9//10, self.net.CHAIN_LENGTH] for new version
                counts = get_desired_version_counts(
                    tracker,
                    tracker.get_nth_parent_hash(
                        previous_share.hash, self.net.CHAIN_LENGTH * 9 // 10),
                    self.net.CHAIN_LENGTH // 10)
                if counts.get(self.VERSION,
                              0) < sum(counts.itervalues()) * 85 // 100:
                    raise p2p.PeerMisbehavingError(
                        'switch without enough hash power upgraded')
            else:
                raise p2p.PeerMisbehavingError(
                    '''%s can't follow %s''' %
                    (type(self).__name__, type(previous_share).__name__))

        other_tx_hashes = [
            tracker.items[tracker.get_nth_parent_hash(
                self.hash,
                share_count)].share_info['new_transaction_hashes'][tx_count]
            for share_count, tx_count in self.iter_transaction_hash_refs()
        ]
        #        print self

        share_info, gentx, other_tx_hashes2, get_share = self.generate_transaction(
            tracker,
            self.share_info['share_data'],
            self.header['bits'].target,
            self.share_info['timestamp'],
            self.share_info['bits'].target,
            self.contents['ref_merkle_link'],
            [(h, None) for h in other_tx_hashes],
            self.net,
            last_txout_nonce=self.contents['last_txout_nonce'],
            base_subsidy=None)

        assert other_tx_hashes2 == other_tx_hashes

        # fixme: commented out / workaround

        #if share_info != self.share_info:
        #    print share_info, self.share_info
        #    raise ValueError('share_info invalid')

        #print self.share_info['share_data']
        #print gentx
        #print other_tx_hashes

        if bitcoin_data.hash256(
                bitcoin_data.tx_type.pack(gentx)) != self.gentx_hash:
            raise ValueError('''gentx doesn't match hash_link''')

        if bitcoin_data.calculate_merkle_link([None] + other_tx_hashes,
                                              0) != self.merkle_link:
            raise ValueError('merkle_link and other_tx_hashes do not match')

        return gentx  # only used by as_block

    def get_other_tx_hashes(self, tracker):
        parents_needed = max(
            share_count
            for share_count, tx_count in self.iter_transaction_hash_refs(
            )) if self.share_info['transaction_hash_refs'] else 0
        parents = tracker.get_height(self.hash) - 1
        if parents < parents_needed:
            return None
        last_shares = list(tracker.get_chain(self.hash, parents_needed + 1))
        return [
            last_shares[share_count].share_info['new_transaction_hashes']
            [tx_count]
            for share_count, tx_count in self.iter_transaction_hash_refs()
        ]

    def _get_other_txs(self, tracker, known_txs):
        other_tx_hashes = self.get_other_tx_hashes(tracker)
        if other_tx_hashes is None:
            return None  # not all parents present

        if not all(tx_hash in known_txs for tx_hash in other_tx_hashes):
            return None  # not all txs present

        return [known_txs[tx_hash] for tx_hash in other_tx_hashes]

    def should_punish_reason(self, previous_block, bits, tracker, known_txs):
        if (self.header['previous_block'], self.header['bits']) != (
                previous_block, bits
        ) and self.header_hash != previous_block and self.peer_addr is not None:
            return True, 'Block-stale detected!'

        if self.pow_hash <= self.header['bits'].target:
            return -1, 'block solution'

        other_txs = self._get_other_txs(tracker, known_txs)
        if other_txs is None:
            if self.time_seen != 0:  # ignore if loaded from ShareStore
                return True, 'not all txs present'
        else:
            all_txs_size = sum(
                bitcoin_data.tx_type.packed_size(tx) for tx in other_txs)
            if all_txs_size > 1000000:
                return True, 'txs over block size limit'

            new_txs_size = sum(
                bitcoin_data.tx_type.packed_size(known_txs[tx_hash])
                for tx_hash in self.share_info['new_transaction_hashes'])
            if new_txs_size > 50000:
                return True, 'new txs over limit'

        return False, None

    def as_block(self, tracker, known_txs):
        other_txs = self._get_other_txs(tracker, known_txs)
        if other_txs is None:
            return None  # not all txs present
        return dict(header=self.header,
                    txs=[self.check(tracker)] + other_txs,
                    signature='')
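
The share formats above keep references to already-known transactions as a flat list of alternating share_count/tx_count values (transaction_hash_refs), which iter_transaction_hash_refs turns back into pairs before they are resolved against the parent chain. The snippet below is a minimal, self-contained sketch of that decoding with toy data; resolve_tx_refs and chain are illustrative names, not p2pool's API.

def resolve_tx_refs(transaction_hash_refs, chain_new_tx_hashes):
    # transaction_hash_refs is stored flat: [s0, t0, s1, t1, ...]
    pairs = zip(transaction_hash_refs[::2], transaction_hash_refs[1::2])
    # share_count == 0 points at this share's own new_transaction_hashes;
    # share_count == i points at the i-th parent share's list
    return [chain_new_tx_hashes[share_count][tx_count]
            for share_count, tx_count in pairs]

# toy chain: this share introduced [0xaa, 0xbb], its parent introduced [0xcc]
chain = [[0xaa, 0xbb], [0xcc]]
assert resolve_tx_refs([0, 0, 1, 0, 0, 1], chain) == [0xaa, 0xcc, 0xbb]
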
Esempio n. 8
0
class BaseShare(object):
    VERSION = 0
    VOTING_VERSION = 0
    SUCCESSOR = None
    
    small_block_header_type = pack.ComposedType([
        ('version', pack.VarIntType()),
        ('previous_block', pack.PossiblyNoneType(0, pack.IntType(256))),
        ('timestamp', pack.IntType(32)),
        ('bits', bitcoin_data.FloatingIntegerType()),
        ('nonce', pack.IntType(32)),
    ])
    share_info_type = None
    share_type = None
    ref_type = None

    gentx_before_refhash = pack.VarStrType().pack(DONATION_SCRIPT) + pack.IntType(64).pack(0) + pack.VarStrType().pack('\x6a\x28' + pack.IntType(256).pack(0) + pack.IntType(64).pack(0))[:3]

    gentx_size = 50000 # conservative estimate, will be overwritten during execution
    gentx_weight = 200000
    cached_types = None
    @classmethod
    def get_dynamic_types(cls, net):
        if not cls.cached_types == None:
            return cls.cached_types
        t = dict(share_info_type=None, share_type=None, ref_type=None)
        segwit_data = ('segwit_data', pack.PossiblyNoneType(dict(txid_merkle_link=dict(branch=[], index=0), wtxid_merkle_root=2**256-1), pack.ComposedType([
            ('txid_merkle_link', pack.ComposedType([
                ('branch', pack.ListType(pack.IntType(256))),
                ('index', pack.IntType(0)), # it will always be 0
            ])),
            ('wtxid_merkle_root', pack.IntType(256))
        ])))
        t['share_info_type'] = pack.ComposedType([
            ('share_data', pack.ComposedType([
                ('previous_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
                ('coinbase', pack.VarStrType()),
                ('nonce', pack.IntType(32)),
                ('pubkey_hash', pack.IntType(160)),
                ('subsidy', pack.IntType(64)),
                ('donation', pack.IntType(16)),
                ('stale_info', pack.EnumType(pack.IntType(8), dict((k, {0: None, 253: 'orphan', 254: 'doa'}.get(k, 'unk%i' % (k,))) for k in xrange(256)))),
                ('desired_version', pack.VarIntType()),
            ]))] + ([segwit_data] if is_segwit_activated(cls.VERSION, net) else []) + [
            ('new_transaction_hashes', pack.ListType(pack.IntType(256))),
            ('transaction_hash_refs', pack.ListType(pack.VarIntType(), 2)), # pairs of share_count, tx_count
            ('far_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
            ('max_bits', bitcoin_data.FloatingIntegerType()),
            ('bits', bitcoin_data.FloatingIntegerType()),
            ('timestamp', pack.IntType(32)),
            ('absheight', pack.IntType(32)),
            ('abswork', pack.IntType(128)),
        ])
        t['share_type'] = pack.ComposedType([
            ('min_header', cls.small_block_header_type),
            ('share_info', t['share_info_type']),
            ('ref_merkle_link', pack.ComposedType([
                ('branch', pack.ListType(pack.IntType(256))),
                ('index', pack.IntType(0)),
            ])),
            ('last_txout_nonce', pack.IntType(64)),
            ('hash_link', hash_link_type),
            ('merkle_link', pack.ComposedType([
                ('branch', pack.ListType(pack.IntType(256))),
                ('index', pack.IntType(0)), # it will always be 0
            ])),
        ])
        t['ref_type'] = pack.ComposedType([
            ('identifier', pack.FixedStrType(64//8)),
            ('share_info', t['share_info_type']),
        ])
        cls.cached_types = t
        return t

    @classmethod
    def generate_transaction(cls, tracker, share_data, block_target, desired_timestamp, desired_target, ref_merkle_link, desired_other_transaction_hashes_and_fees, net, known_txs=None, last_txout_nonce=0, base_subsidy=None, segwit_data=None):
        t0 = time.time()
        previous_share = tracker.items[share_data['previous_share_hash']] if share_data['previous_share_hash'] is not None else None
        
        height, last = tracker.get_height_and_last(share_data['previous_share_hash'])
        assert height >= net.REAL_CHAIN_LENGTH or last is None
        if height < net.TARGET_LOOKBEHIND:
            pre_target3 = net.MAX_TARGET
        else:
            attempts_per_second = get_pool_attempts_per_second(tracker, share_data['previous_share_hash'], net.TARGET_LOOKBEHIND, min_work=True, integer=True)
            pre_target = 2**256//(net.SHARE_PERIOD*attempts_per_second) - 1 if attempts_per_second else 2**256-1
            pre_target2 = math.clip(pre_target, (previous_share.max_target*9//10, previous_share.max_target*11//10))
            pre_target3 = math.clip(pre_target2, (net.MIN_TARGET, net.MAX_TARGET))
        max_bits = bitcoin_data.FloatingInteger.from_target_upper_bound(pre_target3)
        bits = bitcoin_data.FloatingInteger.from_target_upper_bound(math.clip(desired_target, (pre_target3//30, pre_target3)))
        
        new_transaction_hashes = []
        new_transaction_size = 0 # including witnesses
        all_transaction_stripped_size = 0 # stripped size
        all_transaction_real_size = 0 # including witnesses, for statistics
        new_transaction_weight = 0
        all_transaction_weight = 0
        transaction_hash_refs = []
        other_transaction_hashes = []
        t1 = time.time()
        past_shares = list(tracker.get_chain(share_data['previous_share_hash'], min(height, 100)))
        tx_hash_to_this = {}
        for i, share in enumerate(past_shares):
            for j, tx_hash in enumerate(share.new_transaction_hashes):
                if tx_hash not in tx_hash_to_this:
                    tx_hash_to_this[tx_hash] = [1+i, j] # share_count, tx_count
        t2 = time.time()
        for tx_hash, fee in desired_other_transaction_hashes_and_fees:
            if known_txs is not None:
                this_stripped_size = bitcoin_data.tx_id_type.packed_size(known_txs[tx_hash])
                this_real_size     = bitcoin_data.tx_type.packed_size(known_txs[tx_hash])
                this_weight        = this_real_size + 3*this_stripped_size
            else: # we're just verifying someone else's share. We'll calculate sizes in should_punish_reason()
                this_stripped_size = 0
                this_real_size = 0
                this_weight = 0

            if all_transaction_stripped_size + this_stripped_size + 80 + cls.gentx_size +  500 > net.BLOCK_MAX_SIZE:
                break
            if all_transaction_weight + this_weight + 4*80 + cls.gentx_weight + 2000 > net.BLOCK_MAX_WEIGHT:
                break

            if tx_hash in tx_hash_to_this:
                this = tx_hash_to_this[tx_hash]
                if known_txs is not None:
                    all_transaction_stripped_size += this_stripped_size
                    all_transaction_real_size += this_real_size
                    all_transaction_weight += this_weight
            else:
                if known_txs is not None:
                    new_transaction_size += this_real_size
                    all_transaction_stripped_size += this_stripped_size
                    all_transaction_real_size += this_real_size
                    new_transaction_weight += this_weight
                    all_transaction_weight += this_weight
                new_transaction_hashes.append(tx_hash)
                this = [0, len(new_transaction_hashes)-1]
            transaction_hash_refs.extend(this)
            other_transaction_hashes.append(tx_hash)

        t3 = time.time()
        if transaction_hash_refs and max(transaction_hash_refs) < 2**16:
            transaction_hash_refs = array.array('H', transaction_hash_refs)
        elif transaction_hash_refs and max(transaction_hash_refs) < 2**32: # in case we see blocks with more than 65536 tx
            transaction_hash_refs = array.array('L', transaction_hash_refs)
        t4 = time.time()

        if all_transaction_stripped_size:
            print "Generating a share with %i bytes, %i WU (new: %i B, %i WU) in %i tx (%i new), plus est gentx of %i bytes/%i WU" % (
                all_transaction_real_size,
                all_transaction_weight,
                new_transaction_size,
                new_transaction_weight,
                len(other_transaction_hashes),
                len(new_transaction_hashes),
                cls.gentx_size,
                cls.gentx_weight)
            print "Total block stripped size=%i B, full size=%i B,  weight: %i WU" % (
                80+all_transaction_stripped_size+cls.gentx_size, 
                80+all_transaction_real_size+cls.gentx_size, 
                3*80+all_transaction_weight+cls.gentx_weight)

        included_transactions = set(other_transaction_hashes)
        removed_fees = [fee for tx_hash, fee in desired_other_transaction_hashes_and_fees if tx_hash not in included_transactions]
        definite_fees = sum(0 if fee is None else fee for tx_hash, fee in desired_other_transaction_hashes_and_fees if tx_hash in included_transactions)
        if None not in removed_fees:
            share_data = dict(share_data, subsidy=share_data['subsidy'] - sum(removed_fees))
        else:
            assert base_subsidy is not None
            share_data = dict(share_data, subsidy=base_subsidy + definite_fees)
        
        weights, total_weight, donation_weight = tracker.get_cumulative_weights(previous_share.share_data['previous_share_hash'] if previous_share is not None else None,
            max(0, min(height, net.REAL_CHAIN_LENGTH) - 1),
            65535*net.SPREAD*bitcoin_data.target_to_average_attempts(block_target),
        )
        assert total_weight == sum(weights.itervalues()) + donation_weight, (total_weight, sum(weights.itervalues()) + donation_weight)
        
        amounts = dict((script, share_data['subsidy']*(199*weight)//(200*total_weight)) for script, weight in weights.iteritems()) # 99.5% goes according to weights prior to this share
        this_script = bitcoin_data.pubkey_hash_to_script2(share_data['pubkey_hash'])
        amounts[this_script] = amounts.get(this_script, 0) + share_data['subsidy']//200 # 0.5% goes to block finder
        amounts[DONATION_SCRIPT] = amounts.get(DONATION_SCRIPT, 0) + share_data['subsidy'] - sum(amounts.itervalues()) # all that's left over is the donation weight and some extra satoshis due to rounding
        
        if sum(amounts.itervalues()) != share_data['subsidy'] or any(x < 0 for x in amounts.itervalues()):
            raise ValueError()
        
        dests = sorted(amounts.iterkeys(), key=lambda script: (script == DONATION_SCRIPT, amounts[script], script))[-4000:] # block length limit, unlikely to ever be hit

        segwit_activated = is_segwit_activated(cls.VERSION, net)
        if segwit_data is None and known_txs is None:
            segwit_activated = False
        if not(segwit_activated or known_txs is None) and any(bitcoin_data.is_segwit_tx(known_txs[h]) for h in other_transaction_hashes):
            raise ValueError('segwit transaction included before activation')
        if segwit_activated and known_txs is not None:
            share_txs = [(known_txs[h], bitcoin_data.get_txid(known_txs[h]), h) for h in other_transaction_hashes]
            segwit_data = dict(txid_merkle_link=bitcoin_data.calculate_merkle_link([None] + [tx[1] for tx in share_txs], 0), wtxid_merkle_root=bitcoin_data.merkle_hash([0] + [bitcoin_data.get_wtxid(tx[0], tx[1], tx[2]) for tx in share_txs]))
        if segwit_activated and segwit_data is not None:
            witness_reserved_value_str = '[P2Pool]'*4
            witness_reserved_value = pack.IntType(256).unpack(witness_reserved_value_str)
            witness_commitment_hash = bitcoin_data.get_witness_commitment_hash(segwit_data['wtxid_merkle_root'], witness_reserved_value)

        share_info = dict(
            share_data=share_data,
            far_share_hash=None if last is None and height < 99 else tracker.get_nth_parent_hash(share_data['previous_share_hash'], 99),
            max_bits=max_bits,
            bits=bits,

            timestamp=(math.clip(desired_timestamp, (
                        (previous_share.timestamp + net.SHARE_PERIOD) - (net.SHARE_PERIOD - 1), # = previous_share.timestamp + 1
                        (previous_share.timestamp + net.SHARE_PERIOD) + (net.SHARE_PERIOD - 1),)) if previous_share is not None else desired_timestamp
                      ) if cls.VERSION < 32 else
                      max(desired_timestamp, (previous_share.timestamp + 1)) if previous_share is not None else desired_timestamp,
            new_transaction_hashes=new_transaction_hashes,
            transaction_hash_refs=transaction_hash_refs,
            absheight=((previous_share.absheight if previous_share is not None else 0) + 1) % 2**32,
            abswork=((previous_share.abswork if previous_share is not None else 0) + bitcoin_data.target_to_average_attempts(bits.target)) % 2**128,
        )

        if previous_share != None and desired_timestamp > previous_share.timestamp + 180:
            print "Warning: Previous share's timestamp is %i seconds old." % int(desired_timestamp - previous_share.timestamp)
            print "Make sure your system clock is accurate, and ensure that you're connected to decent peers."
            print "If your clock is more than 300 seconds behind, it can result in orphaned shares."
            print "(It's also possible that this share is just taking a long time to mine.)"
        if previous_share != None and previous_share.timestamp > int(time.time()) + 3:
            print "WARNING! Previous share's timestamp is %i seconds in the future. This is not normal." % \
                   int(previous_share.timestamp - (int(time.time())))
            print "Make sure your system clock is accurate. Errors beyond 300 sec result in orphaned shares."

        if segwit_activated:
            share_info['segwit_data'] = segwit_data
        
        gentx = dict(
            version=1,
            tx_ins=[dict(
                previous_output=None,
                sequence=None,
                script=share_data['coinbase'],
            )],
            tx_outs=([dict(value=0, script='\x6a\x24\xaa\x21\xa9\xed' + pack.IntType(256).pack(witness_commitment_hash))] if segwit_activated else []) +
                [dict(value=amounts[script], script=script) for script in dests if amounts[script] or script == DONATION_SCRIPT] +
                [dict(value=0, script='\x6a\x28' + cls.get_ref_hash(net, share_info, ref_merkle_link) + pack.IntType(64).pack(last_txout_nonce))],
            lock_time=0,
        )
        if segwit_activated:
            gentx['marker'] = 0
            gentx['flag'] = 1
            gentx['witness'] = [[witness_reserved_value_str]]
        
        def get_share(header, last_txout_nonce=last_txout_nonce):
            min_header = dict(header); del min_header['merkle_root']
            share = cls(net, None, dict(
                min_header=min_header,
                share_info=share_info,
                ref_merkle_link=dict(branch=[], index=0),
                last_txout_nonce=last_txout_nonce,
                hash_link=prefix_to_hash_link(bitcoin_data.tx_id_type.pack(gentx)[:-32-8-4], cls.gentx_before_refhash),
                merkle_link=bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0),
            ))
            assert share.header == header # checks merkle_root
            return share
        t5 = time.time()
        if p2pool.BENCH: print "%8.3f ms for data.py:generate_transaction(). Parts: %8.3f %8.3f %8.3f %8.3f %8.3f " % (
            (t5-t0)*1000.,
            (t1-t0)*1000.,
            (t2-t1)*1000.,
            (t3-t2)*1000.,
            (t4-t3)*1000.,
            (t5-t4)*1000.)
        return share_info, gentx, other_transaction_hashes, get_share
    
    @classmethod
    def get_ref_hash(cls, net, share_info, ref_merkle_link):
        return pack.IntType(256).pack(bitcoin_data.check_merkle_link(bitcoin_data.hash256(cls.get_dynamic_types(net)['ref_type'].pack(dict(
            identifier=net.IDENTIFIER,
            share_info=share_info,
        ))), ref_merkle_link))
    
    __slots__ = 'net peer_addr contents min_header share_info hash_link merkle_link hash share_data max_target target timestamp previous_hash new_script desired_version gentx_hash header pow_hash header_hash new_transaction_hashes time_seen absheight abswork'.split(' ')
    
    def __init__(self, net, peer_addr, contents):
        dynamic_types = self.get_dynamic_types(net)
        self.share_info_type = dynamic_types['share_info_type']
        self.share_type = dynamic_types['share_type']
        self.ref_type = dynamic_types['ref_type']

        self.net = net
        self.peer_addr = peer_addr
        self.contents = contents
        
        self.min_header = contents['min_header']
        self.share_info = contents['share_info']
        self.hash_link = contents['hash_link']
        self.merkle_link = contents['merkle_link']

        # save some memory if we can
        txrefs = self.share_info['transaction_hash_refs']
        if txrefs and max(txrefs) < 2**16:
            self.share_info['transaction_hash_refs'] = array.array('H', txrefs)
        elif txrefs and max(txrefs) < 2**32: # in case we see blocks with more than 65536 tx in the future
            self.share_info['transaction_hash_refs'] = array.array('L', txrefs)
        
        segwit_activated = is_segwit_activated(self.VERSION, net)
        
        if not (2 <= len(self.share_info['share_data']['coinbase']) <= 100):
            raise ValueError('''bad coinbase size! %i bytes''' % (len(self.share_info['share_data']['coinbase']),))
        
        if len(self.merkle_link['branch']) > 16 or (segwit_activated and len(self.share_info['segwit_data']['txid_merkle_link']['branch']) > 16):
            raise ValueError('merkle branch too long!')
        
        assert not self.hash_link['extra_data'], repr(self.hash_link['extra_data'])
        
        self.share_data = self.share_info['share_data']
        self.max_target = self.share_info['max_bits'].target
        self.target = self.share_info['bits'].target
        self.timestamp = self.share_info['timestamp']
        self.previous_hash = self.share_data['previous_share_hash']
        self.new_script = bitcoin_data.pubkey_hash_to_script2(self.share_data['pubkey_hash'])
        self.desired_version = self.share_data['desired_version']
        self.absheight = self.share_info['absheight']
        self.abswork = self.share_info['abswork']
        if net.NAME == 'bitcoin' and self.absheight > 3927800 and self.desired_version == 16:
            raise ValueError("This is not a hardfork-supporting share!")
        
        n = set()
        for share_count, tx_count in self.iter_transaction_hash_refs():
            assert share_count < 110
            if share_count == 0:
                n.add(tx_count)
        assert n == set(range(len(self.share_info['new_transaction_hashes'])))
        
        self.gentx_hash = check_hash_link(
            self.hash_link,
            self.get_ref_hash(net, self.share_info, contents['ref_merkle_link']) + pack.IntType(64).pack(self.contents['last_txout_nonce']) + pack.IntType(32).pack(0),
            self.gentx_before_refhash,
        )
        merkle_root = bitcoin_data.check_merkle_link(self.gentx_hash, self.share_info['segwit_data']['txid_merkle_link'] if segwit_activated else self.merkle_link)
        self.header = dict(self.min_header, merkle_root=merkle_root)
        self.pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(self.header))
        self.hash = self.header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(self.header))
        
        if self.target > net.MAX_TARGET:
            from p2pool import p2p
            raise p2p.PeerMisbehavingError('share target invalid')
        
        if self.pow_hash > self.target:
            from p2pool import p2p
            raise p2p.PeerMisbehavingError('share PoW invalid')
        
        self.new_transaction_hashes = self.share_info['new_transaction_hashes']
        
        # XXX eww
        self.time_seen = time.time()
    
    def __repr__(self):
        return 'Share' + repr((self.net, self.peer_addr, self.contents))
    
    def as_share(self):
        return dict(type=self.VERSION, contents=self.share_type.pack(self.contents))
    
    def iter_transaction_hash_refs(self):
        return zip(self.share_info['transaction_hash_refs'][::2], self.share_info['transaction_hash_refs'][1::2])
    
    def check(self, tracker, other_txs=None):
        from p2pool import p2p
        if self.timestamp > int(time.time()) + 600:
            raise ValueError("Share timestamp is %i seconds in the future! Check your system clock." % \
                self.timestamp - int(time.time()))
        counts = None
        if self.share_data['previous_share_hash'] is not None:
            previous_share = tracker.items[self.share_data['previous_share_hash']]
            if tracker.get_height(self.share_data['previous_share_hash']) >= self.net.CHAIN_LENGTH:
                counts = get_desired_version_counts(tracker, tracker.get_nth_parent_hash(previous_share.hash, self.net.CHAIN_LENGTH*9//10), self.net.CHAIN_LENGTH//10)
                if type(self) is type(previous_share):
                    pass
                elif type(self) is type(previous_share).SUCCESSOR:
                    # switch only valid if 60% of hashes in [self.net.CHAIN_LENGTH*9//10, self.net.CHAIN_LENGTH] for new version
                    if counts.get(self.VERSION, 0) < sum(counts.itervalues())*60//100:
                        raise p2p.PeerMisbehavingError('switch without enough hash power upgraded')
                else:
                    raise p2p.PeerMisbehavingError('''%s can't follow %s''' % (type(self).__name__, type(previous_share).__name__))
            elif type(self) is type(previous_share).SUCCESSOR:
                raise p2p.PeerMisbehavingError('switch without enough history')
        
        other_tx_hashes = [tracker.items[tracker.get_nth_parent_hash(self.hash, share_count)].share_info['new_transaction_hashes'][tx_count] for share_count, tx_count in self.iter_transaction_hash_refs()]
        if other_txs is not None and not isinstance(other_txs, dict): other_txs = dict((bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)), tx) for tx in other_txs)
        
        share_info, gentx, other_tx_hashes2, get_share = self.generate_transaction(tracker, self.share_info['share_data'], self.header['bits'].target, self.share_info['timestamp'], self.share_info['bits'].target, self.contents['ref_merkle_link'], [(h, None) for h in other_tx_hashes], self.net,
            known_txs=other_txs, last_txout_nonce=self.contents['last_txout_nonce'], segwit_data=self.share_info.get('segwit_data', None))
        

        assert other_tx_hashes2 == other_tx_hashes
        if share_info != self.share_info:
            raise ValueError('share_info invalid')
        if bitcoin_data.get_txid(gentx) != self.gentx_hash:
            raise ValueError('''gentx doesn't match hash_link''')
        if bitcoin_data.calculate_merkle_link([None] + other_tx_hashes, 0) != self.merkle_link: # the other hash commitments are checked in the share_info assertion
            raise ValueError('merkle_link and other_tx_hashes do not match')
        
        update_min_protocol_version(counts, self)

        self.gentx_size = len(bitcoin_data.tx_id_type.pack(gentx))
        self.gentx_weight = len(bitcoin_data.tx_type.pack(gentx)) + 3*self.gentx_size

        type(self).gentx_size   = self.gentx_size # saving this share's gentx size as a class variable is an ugly hack, and you're welcome to hate me for doing it. But it works.
        type(self).gentx_weight = self.gentx_weight

        return gentx # only used by as_block
    
    def get_other_tx_hashes(self, tracker):
        parents_needed = max(share_count for share_count, tx_count in self.iter_transaction_hash_refs()) if self.share_info['transaction_hash_refs'] else 0
        parents = tracker.get_height(self.hash) - 1
        if parents < parents_needed:
            return None
        last_shares = list(tracker.get_chain(self.hash, parents_needed + 1))
        return [last_shares[share_count].share_info['new_transaction_hashes'][tx_count] for share_count, tx_count in self.iter_transaction_hash_refs()]
    
    def _get_other_txs(self, tracker, known_txs):
        other_tx_hashes = self.get_other_tx_hashes(tracker)
        if other_tx_hashes is None:
            return None # not all parents present
        
        if not all(tx_hash in known_txs for tx_hash in other_tx_hashes):
            return None # not all txs present
        
        return [known_txs[tx_hash] for tx_hash in other_tx_hashes]
    
    def should_punish_reason(self, previous_block, bits, tracker, known_txs):
        if self.pow_hash <= self.header['bits'].target:
            return -1, 'block solution'
        
        other_txs = self._get_other_txs(tracker, known_txs)
        if other_txs is None:
            pass
        else:
            all_txs_size = sum(bitcoin_data.tx_type.packed_size(tx) for tx in other_txs)
            stripped_txs_size = sum(bitcoin_data.tx_id_type.packed_size(tx) for tx in other_txs)
            if p2pool.DEBUG:
                print "stripped_txs_size = %i, all_txs_size = %i, weight = %i" % (stripped_txs_size, all_txs_size, all_txs_size + 3 * stripped_txs_size)
                print "Block size = %i, block weight = %i" %(stripped_txs_size + 80 + self.gentx_size , all_txs_size + 3 * stripped_txs_size + 4*80 + self.gentx_weight)
            if all_txs_size + 3 * stripped_txs_size + 4*80 + self.gentx_weight > tracker.net.BLOCK_MAX_WEIGHT:
                return True, 'txs over block weight limit'
            if stripped_txs_size + 80 + self.gentx_size > tracker.net.BLOCK_MAX_SIZE:
                return True, 'txs over block size limit'
        
        return False, None
    
    def as_block(self, tracker, known_txs):
        other_txs = self._get_other_txs(tracker, known_txs)
        if other_txs is None:
            return None # not all txs present
        return dict(header=self.header, txs=[self.check(tracker, other_txs)] + other_txs)
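
generate_transaction above splits the subsidy with integer arithmetic: roughly 99.5% is distributed by cumulative weight, 0.5% goes to the share's own script, and whatever remains (the donation weight plus rounding dust) goes to the donation script. The following is a small sketch of that split, assuming plain dicts and integers in place of p2pool's types; split_subsidy and the script names are illustrative only.

def split_subsidy(subsidy, weights, this_script, donation_script):
    total_weight = sum(weights.values())
    # ~99.5% distributed proportionally to weight (integer division rounds down)
    amounts = dict((script, subsidy * (199 * w) // (200 * total_weight))
                   for script, w in weights.items())
    # ~0.5% to the share's own script
    amounts[this_script] = amounts.get(this_script, 0) + subsidy // 200
    # remainder (donation share plus rounding dust) to the donation script
    amounts[donation_script] = (amounts.get(donation_script, 0)
                                + subsidy - sum(amounts.values()))
    assert sum(amounts.values()) == subsidy and all(v >= 0 for v in amounts.values())
    return amounts

print(split_subsidy(625000000, {'miner_a': 3, 'miner_b': 1}, 'miner_a', 'donation'))
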
Esempio n. 9
0
File: p2p.py Project: phiten/p2pool
class Protocol(p2protocol.Protocol):
    def __init__(self, net):
        p2protocol.Protocol.__init__(self,
                                     net.P2P_PREFIX,
                                     1000000,
                                     ignore_trailing_payload=True)

    def connectionMade(self):
        self.send_version(version=80001,
                          services=1,
                          time=int(time.time()),
                          addr_to=dict(
                              services=1,
                              address=self.transport.getPeer().host,
                              port=self.transport.getPeer().port,
                          ),
                          addr_from=dict(
                              services=1,
                              address=self.transport.getHost().host,
                              port=self.transport.getHost().port,
                          ),
                          nonce=random.randrange(2**64),
                          extra=64)

    class ExtraDataType(pack.Type):
        def read(self, file):
            b = pack.IntType(8)
            res = 0
            while True:
                try:
                    _, file = b.read(file)
                    res += 1
                except Exception:
                    break
            return res, file

        def write(self, file, item):
            d = pack.FixedStrType(item)
            return d.write(file, '\0' * item)

    message_version = pack.ComposedType([
        ('version', pack.IntType(32)), ('services', pack.IntType(64)),
        ('time', pack.IntType(64)), ('addr_to', bitcoin_data.address_type),
        ('addr_from', bitcoin_data.address_type), ('nonce', pack.IntType(64)),
        ('extra', ExtraDataType())
    ])

    def handle_version(self, version, services, time, addr_to, addr_from,
                       nonce, extra):
        self.send_verack()

    message_verack = pack.ComposedType([])

    def handle_verack(self):
        self.get_block = deferral.ReplyMatcher(lambda hash: self.send_getdata(
            requests=[dict(type='block', hash=hash)]))
        self.get_block_header = deferral.ReplyMatcher(
            lambda hash: self.send_getheaders(version=1, have=[], last=hash))

        if hasattr(self.factory, 'resetDelay'):
            self.factory.resetDelay()
        if hasattr(self.factory, 'gotConnection'):
            self.factory.gotConnection(self)

        self.pinger = deferral.RobustLoopingCall(self.send_ping, nonce=1234)
        self.pinger.start(30)

    message_inv = pack.ComposedType([
        ('invs',
         pack.ListType(
             pack.ComposedType([
                 ('type', pack.EnumType(pack.IntType(32), {
                     1: 'tx',
                     2: 'block'
                 })),
                 ('hash', pack.IntType(256)),
             ]))),
    ])

    def handle_inv(self, invs):
        for inv in invs:
            if inv['type'] == 'tx':
                self.send_getdata(requests=[inv])
            elif inv['type'] == 'block':
                self.factory.new_block.happened(inv['hash'])
            else:
                print 'Unknown inv type', inv

    message_getdata = pack.ComposedType([
        ('requests',
         pack.ListType(
             pack.ComposedType([
                 ('type', pack.EnumType(pack.IntType(32), {
                     1: 'tx',
                     2: 'block'
                 })),
                 ('hash', pack.IntType(256)),
             ]))),
    ])
    message_getblocks = pack.ComposedType([
        ('version', pack.IntType(32)),
        ('have', pack.ListType(pack.IntType(256))),
        ('last', pack.PossiblyNoneType(0, pack.IntType(256))),
    ])
    message_getheaders = pack.ComposedType([
        ('version', pack.IntType(32)),
        ('have', pack.ListType(pack.IntType(256))),
        ('last', pack.PossiblyNoneType(0, pack.IntType(256))),
    ])
    message_getaddr = pack.ComposedType([])

    message_addr = pack.ComposedType([
        ('addrs',
         pack.ListType(
             pack.ComposedType([
                 ('timestamp', pack.IntType(32)),
                 ('address', bitcoin_data.address_type),
             ]))),
    ])

    def handle_addr(self, addrs):
        for addr in addrs:
            pass

    message_tx = pack.ComposedType([
        ('tx', bitcoin_data.tx_type),
    ])

    def handle_tx(self, tx):
        self.factory.new_tx.happened(tx)

    message_block = pack.ComposedType([
        ('block', bitcoin_data.block_type),
    ])

    def handle_block(self, block):
        block_hash = bitcoin_data.hash256(
            bitcoin_data.block_header_type.pack(block['header']))
        self.get_block.got_response(block_hash, block)
        self.get_block_header.got_response(block_hash, block['header'])

    message_headers = pack.ComposedType([
        ('headers', pack.ListType(bitcoin_data.block_type)),
    ])

    def handle_headers(self, headers):
        for header in headers:
            header = header['header']
            self.get_block_header.got_response(
                bitcoin_data.hash256(
                    bitcoin_data.block_header_type.pack(header)), header)
        self.factory.new_headers.happened(
            [header['header'] for header in headers])

    message_ping = pack.ComposedType([
        ('nonce', pack.IntType(64)),
    ])

    def handle_ping(self, nonce):
        self.send_pong(nonce=nonce)

    message_pong = pack.ComposedType([
        ('nonce', pack.IntType(64)),
    ])

    def handle_pong(self, nonce):
        pass

    message_alert = pack.ComposedType([
        ('message', pack.VarStrType()),
        ('signature', pack.VarStrType()),
    ])

    def handle_alert(self, message, signature):
        pass  # print 'ALERT:', (message, signature)

    message_reject = pack.ComposedType([
        ('message', pack.VarStrType()),
        ('ccode', pack.IntType(8)),
        ('reason', pack.VarStrType()),
        ('data', pack.IntType(256)),
    ])

    def handle_reject(self, message, ccode, reason, data):
        if True:
            print >> sys.stderr, 'Received reject message (%s): %s' % (message,
                                                                       reason)

    def connectionLost(self, reason):
        if hasattr(self.factory, 'gotConnection'):
            self.factory.gotConnection(None)
        if hasattr(self, 'pinger'):
            self.pinger.stop()
        if True:
            print >> sys.stderr, 'Bitcoin connection lost. Reason:', reason.getErrorMessage(
            )
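
The nested ExtraDataType above reads whatever trailing bytes remain in a version message, reporting only their length, and writes that many zero bytes back out. A minimal sketch of the same idea, assuming plain io.BytesIO streams instead of the pack framework:

import io

def read_extra(f):
    # consume the remaining bytes without interpreting them; return the count
    return len(f.read())

def write_extra(f, length):
    # emit `length` zero bytes, matching what the peer sends for extra=64
    f.write(b'\x00' * length)

buf = io.BytesIO()
write_extra(buf, 64)
buf.seek(0)
assert read_extra(buf) == 64
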
Esempio n. 10
0
class Share(object):
    small_block_header_type = pack.ComposedType([
        ('version', pack.VarIntType()),  # XXX must be constrained to 32 bits
        ('previous_block', pack.PossiblyNoneType(0, pack.IntType(256))),
        ('timestamp', pack.IntType(32)),
        ('bits', bitcoin_data.FloatingIntegerType()),
        ('nonce', pack.IntType(32)),
    ])

    share_data_type = pack.ComposedType([
        ('previous_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
        ('coinbase', pack.VarStrType()),
        ('nonce', pack.IntType(32)),
        ('pubkey_hash', pack.IntType(160)),
        ('subsidy', pack.IntType(64)),
        ('donation', pack.IntType(16)),
        ('stale_info',
         pack.EnumType(
             pack.IntType(8),
             dict((k, {
                 0: None,
                 253: 'orphan',
                 254: 'doa'
             }.get(k, 'unk%i' % (k, ))) for k in xrange(256)))),
        ('desired_version', pack.VarIntType()),
    ])

    share_info_type = pack.ComposedType([
        ('share_data', share_data_type),
        ('far_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
        ('max_bits', bitcoin_data.FloatingIntegerType()),
        ('bits', bitcoin_data.FloatingIntegerType()),
        ('timestamp', pack.IntType(32)),
    ])

    share_common_type = pack.ComposedType([
        ('min_header', small_block_header_type),
        ('share_info', share_info_type),
        ('ref_merkle_link',
         pack.ComposedType([
             ('branch', pack.ListType(pack.IntType(256))),
             ('index', pack.VarIntType()),
         ])),
        ('hash_link', hash_link_type),
    ])
    share1a_type = pack.ComposedType([
        ('common', share_common_type),
        (
            'merkle_link',
            pack.ComposedType([
                ('branch', pack.ListType(pack.IntType(256))),
                ('index', pack.IntType(0)),  # it will always be 0
            ])),
    ])
    share1b_type = pack.ComposedType([
        ('common', share_common_type),
        ('other_txs', pack.ListType(bitcoin_data.tx_type)),
    ])

    ref_type = pack.ComposedType([
        ('identifier', pack.FixedStrType(64 // 8)),
        ('share_info', share_info_type),
    ])

    gentx_before_refhash = pack.VarStrType().pack(
        DONATION_SCRIPT) + pack.IntType(64).pack(0) + pack.VarStrType().pack(
            '\x20' + pack.IntType(256).pack(0))[:2]

    @classmethod
    def generate_transaction(cls, tracker, share_data, block_target,
                             desired_timestamp, desired_target,
                             ref_merkle_link, other_transaction_hashes, net):
        previous_share = tracker.items[
            share_data['previous_share_hash']] if share_data[
                'previous_share_hash'] is not None else None

        height, last = tracker.get_height_and_last(
            share_data['previous_share_hash'])
        assert height >= net.REAL_CHAIN_LENGTH or last is None
        if height < net.TARGET_LOOKBEHIND:
            pre_target3 = net.MAX_TARGET
        else:
            attempts_per_second = get_pool_attempts_per_second(
                tracker,
                share_data['previous_share_hash'],
                net.TARGET_LOOKBEHIND,
                min_work=True,
                integer=True)
            pre_target = 2**256 // (
                net.SHARE_PERIOD *
                attempts_per_second) - 1 if attempts_per_second else 2**256 - 1
            pre_target2 = math.clip(pre_target,
                                    (previous_share.max_target * 9 // 10,
                                     previous_share.max_target * 11 // 10))
            pre_target3 = math.clip(pre_target2, (0, net.MAX_TARGET))
        max_bits = bitcoin_data.FloatingInteger.from_target_upper_bound(
            pre_target3)
        bits = bitcoin_data.FloatingInteger.from_target_upper_bound(
            math.clip(desired_target, (pre_target3 // 10, pre_target3)))

        weights, total_weight, donation_weight = tracker.get_cumulative_weights(
            share_data['previous_share_hash'],
            min(height, net.REAL_CHAIN_LENGTH),
            65535 * net.SPREAD *
            bitcoin_data.target_to_average_attempts(block_target),
        )
        assert total_weight == sum(weights.itervalues()) + donation_weight, (
            total_weight, sum(weights.itervalues()) + donation_weight)

        amounts = dict(
            (script,
             share_data['subsidy'] * (199 * weight) // (200 * total_weight))
            for script, weight in weights.iteritems(
            ))  # 99.5% goes according to weights prior to this share
        this_script = bitcoin_data.pubkey_hash_to_script2(
            share_data['pubkey_hash'])
        amounts[this_script] = amounts.get(
            this_script,
            0) + share_data['subsidy'] // 200  # 0.5% goes to block finder
        amounts[DONATION_SCRIPT] = amounts.get(
            DONATION_SCRIPT, 0
        ) + share_data['subsidy'] - sum(
            amounts.itervalues()
        )  # all that's left over is the donation weight and some extra satoshis due to rounding

        if sum(amounts.itervalues()) != share_data['subsidy'] or any(
                x < 0 for x in amounts.itervalues()):
            raise ValueError()

        dests = sorted(
            amounts.iterkeys(),
            key=lambda script:
            (script == DONATION_SCRIPT, amounts[script], script))[
                -4000:]  # block length limit, unlikely to ever be hit

        share_info = dict(
            share_data=share_data,
            far_share_hash=None if last is None and height < 99 else
            tracker.get_nth_parent_hash(share_data['previous_share_hash'], 99),
            max_bits=max_bits,
            bits=bits,
            timestamp=math.clip(
                desired_timestamp,
                (
                    (previous_share.timestamp + net.SHARE_PERIOD) -
                    (net.SHARE_PERIOD - 1),  # = previous_share.timestamp + 1
                    (previous_share.timestamp + net.SHARE_PERIOD) +
                    (net.SHARE_PERIOD - 1),
                )) if previous_share is not None else desired_timestamp,
        )

        gentx = dict(
            version=1,
            tx_ins=[
                dict(
                    previous_output=None,
                    sequence=None,
                    script=share_data['coinbase'].ljust(2, '\x00'),
                )
            ],
            tx_outs=[
                dict(value=amounts[script], script=script) for script in dests
                if amounts[script] or script == DONATION_SCRIPT
            ] + [
                dict(
                    value=0,
                    script='\x20' +
                    cls.get_ref_hash(net, share_info, ref_merkle_link),
                )
            ],
            lock_time=0,
        )

        def get_share(header, transactions):
            assert transactions[0] == gentx and [
                bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx))
                for tx in transactions[1:]
            ] == other_transaction_hashes
            min_header = dict(header)
            del min_header['merkle_root']
            hash_link = prefix_to_hash_link(
                bitcoin_data.tx_type.pack(gentx)[:-32 - 4],
                cls.gentx_before_refhash)
            merkle_link = bitcoin_data.calculate_merkle_link(
                [None] + other_transaction_hashes, 0)
            pow_hash = net.PARENT.POW_FUNC(
                bitcoin_data.block_header_type.pack(header))
            return cls(net,
                       None,
                       dict(
                           min_header=min_header,
                           share_info=share_info,
                           hash_link=hash_link,
                           ref_merkle_link=dict(branch=[], index=0),
                       ),
                       merkle_link=merkle_link,
                       other_txs=transactions[1:]
                       if pow_hash <= header['bits'].target else None)

        return share_info, gentx, other_transaction_hashes, get_share

    @classmethod
    def get_ref_hash(cls, net, share_info, ref_merkle_link):
        return pack.IntType(256).pack(
            bitcoin_data.check_merkle_link(
                bitcoin_data.hash256(
                    cls.ref_type.pack(
                        dict(
                            identifier=net.IDENTIFIER,
                            share_info=share_info,
                        ))), ref_merkle_link))

    __slots__ = 'net peer common min_header share_info hash_link merkle_link other_txs hash share_data max_target target timestamp previous_hash new_script desired_version gentx_hash header pow_hash header_hash time_seen'.split(
        ' ')

    def __init__(self, net, peer, common, merkle_link, other_txs):
        self.net = net
        self.peer = peer
        self.common = common
        self.min_header = common['min_header']
        self.share_info = common['share_info']
        self.hash_link = common['hash_link']
        self.merkle_link = merkle_link
        self.other_txs = other_txs

        if len(self.share_info['share_data']['coinbase']) > 100:
            raise ValueError(
                '''coinbase too large! %i bytes''' %
                (len(self.share_info['share_data']['coinbase']), ))

        if len(merkle_link['branch']) > 16:
            raise ValueError('merkle branch too long!')

        if p2pool.DEBUG and other_txs is not None and bitcoin_data.calculate_merkle_link(
            [0] + [
                bitcoin_data.hash256(bitcoin_data.tx_type.pack(x))
                for x in other_txs
            ], 0) != merkle_link:
            raise ValueError('merkle_link and other_txs do not match')

        assert not self.hash_link['extra_data'], repr(
            self.hash_link['extra_data'])

        self.share_data = self.share_info['share_data']
        self.max_target = self.share_info['max_bits'].target
        self.target = self.share_info['bits'].target
        self.timestamp = self.share_info['timestamp']
        self.previous_hash = self.share_data['previous_share_hash']
        self.new_script = bitcoin_data.pubkey_hash_to_script2(
            self.share_data['pubkey_hash'])
        self.desired_version = self.share_data['desired_version']

        self.gentx_hash = check_hash_link(
            self.hash_link,
            self.get_ref_hash(net, self.share_info, common['ref_merkle_link'])
            + pack.IntType(32).pack(0),
            self.gentx_before_refhash,
        )
        merkle_root = bitcoin_data.check_merkle_link(self.gentx_hash,
                                                     merkle_link)
        self.header = dict(self.min_header, merkle_root=merkle_root)
        self.pow_hash = net.PARENT.POW_FUNC(
            bitcoin_data.block_header_type.pack(self.header))
        self.hash = self.header_hash = bitcoin_data.hash256(
            bitcoin_data.block_header_type.pack(self.header))

        if self.pow_hash > self.target:
            from p2pool import p2p
            raise p2p.PeerMisbehavingError('share PoW invalid')

        if other_txs is not None and not self.pow_hash <= self.header[
                'bits'].target:
            raise ValueError('other_txs provided when not a block solution')
        if other_txs is None and self.pow_hash <= self.header['bits'].target:
            raise ValueError('other_txs not provided when a block solution')

        # XXX eww
        self.time_seen = time.time()

    def __repr__(self):
        return '<Share %s>' % (' '.join('%s=%r' % (k, getattr(self, k))
                                        for k in self.__slots__), )

    def as_share1a(self):
        return dict(type=4,
                    contents=self.share1a_type.pack(
                        dict(common=self.common,
                             merkle_link=self.merkle_link)))

    def as_share1b(self):
        return dict(type=5,
                    contents=self.share1b_type.pack(
                        dict(common=self.common, other_txs=self.other_txs)))

    def as_share(self):
        if not self.pow_hash <= self.header['bits'].target:  # share1a
            return self.as_share1a()
        else:
            return self.as_share1b()

    def check(self, tracker):
        share_info, gentx, other_transaction_hashes, get_share = self.generate_transaction(
            tracker, self.share_info['share_data'], self.header['bits'].target,
            self.share_info['timestamp'], self.share_info['bits'].target,
            self.common['ref_merkle_link'], [], self.net
        )  # ok because other_transaction_hashes is only used in get_share
        if share_info != self.share_info:
            raise ValueError('share_info invalid')
        if bitcoin_data.hash256(
                bitcoin_data.tx_type.pack(gentx)) != self.gentx_hash:
            raise ValueError('''gentx doesn't match hash_link''')
        return gentx  # only used by as_block

    def as_block(self, tracker):
        if self.other_txs is None:
            raise ValueError('share does not contain all txs')
        return dict(header=self.header,
                    txs=[self.check(tracker)] + self.other_txs)
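
Several of the checks above rebuild the block's merkle_root from the generation transaction hash and a merkle link whose index is 0 (the gentx is always the first transaction). Below is a hedged sketch of that fold, written in Python 3 syntax against hashlib and assuming Bitcoin-style double SHA-256 over little-endian 256-bit values; merkle_root_from_link is an illustrative name, not p2pool's check_merkle_link.

import hashlib

def hash256_le(data):
    # double SHA-256, interpreted as a little-endian 256-bit integer
    return int.from_bytes(hashlib.sha256(hashlib.sha256(data).digest()).digest(), 'little')

def merkle_root_from_link(leaf_hash, branch, index=0):
    assert index == 0  # the gentx sits at index 0, so it is always the left node
    h = leaf_hash
    for sibling in branch:
        h = hash256_le(h.to_bytes(32, 'little') + sibling.to_bytes(32, 'little'))
    return h

# with an empty branch the "root" is just the leaf itself
assert merkle_root_from_link(5, []) == 5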