Example 1
 def test_tx_hash(self):
     assert data.get_txid(
         dict(
             version=1,
             tx_ins=[
                 dict(
                     previous_output=None,
                     sequence=None,
                     script=
                     '70736a0468860e1a0452389500522cfabe6d6d2b2f33cf8f6291b184f1b291d24d82229463fcec239afea0ee34b4bfc622f62401000000000000004d696e656420627920425443204775696c6420ac1eeeed88'
                     .decode('hex'),
                 )
             ],
             tx_outs=[
                 dict(
                     value=5003880250,
                     script=data.pubkey_hash_to_script2(
                         pack.IntType(160).unpack(
                             'ca975b00a8c203b8692f5a18d92dc5c2d2ebc57b'.
                             decode('hex'))),
                 )
             ],
             lock_time=0,
         )
     ) == 0xb53802b2333e828d6532059f46ecf6b313a42d79f97925e457fbbfda45367e5c
Example 2
    def __init__(self, net, peer_addr, contents):
        self.net = net
        self.peer_addr = peer_addr
        self.contents = contents

        self.min_header = contents["min_header"]
        self.share_info = contents["share_info"]
        self.hash_link = contents["hash_link"]
        self.merkle_link = contents["merkle_link"]

        if not (2 <= len(self.share_info["share_data"]["coinbase"]) <= 100):
            raise ValueError("""bad coinbase size! %i bytes""" % (len(self.share_info["share_data"]["coinbase"]),))

        if len(self.merkle_link["branch"]) > 16:
            raise ValueError("merkle branch too long!")

        assert not self.hash_link["extra_data"], repr(self.hash_link["extra_data"])

        self.share_data = self.share_info["share_data"]
        self.max_target = self.share_info["max_bits"].target
        self.target = self.share_info["bits"].target
        self.timestamp = self.share_info["timestamp"]
        self.previous_hash = self.share_data["previous_share_hash"]
        self.new_script = bitcoin_data.pubkey_hash_to_script2(self.share_data["pubkey_hash"])
        self.desired_version = self.share_data["desired_version"]

        n = set()
        for share_count, tx_count in self.iter_transaction_hash_refs():
            assert share_count < 110
            if share_count == 0:
                n.add(tx_count)
        assert n == set(range(len(self.share_info["new_transaction_hashes"])))

        self.gentx_hash = check_hash_link(
            self.hash_link,
            self.get_ref_hash(net, self.share_info, contents["ref_merkle_link"])
            + pack.IntType(32).pack(self.contents["last_txout_nonce"])
            + pack.IntType(32).pack(0),
            self.gentx_before_refhash,
        )
        merkle_root = bitcoin_data.check_merkle_link(self.gentx_hash, self.merkle_link)
        self.header = dict(self.min_header, merkle_root=merkle_root)
        self.pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(self.header))
        self.hash = self.header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(self.header))

        if self.target > net.MAX_TARGET:
            from p2pool import p2p

            raise p2p.PeerMisbehavingError("share target invalid")

        if self.pow_hash > self.target:
            from p2pool import p2p

            raise p2p.PeerMisbehavingError("share PoW invalid")

        self.new_transaction_hashes = self.share_info["new_transaction_hashes"]

        # XXX eww
        self.time_seen = time.time()
Example 3
 def __init__(self, net, peer_addr, contents):
     self.net = net
     self.peer_addr = peer_addr
     self.contents = contents
     
     self.min_header = contents['min_header']
     self.share_info = contents['share_info']
     self.hash_link = contents['hash_link']
     self.merkle_link = contents['merkle_link']
     
     if not (2 <= len(self.share_info['share_data']['coinbase']) <= 100):
         raise ValueError('''bad coinbase size! %i bytes''' % (len(self.share_info['share_data']['coinbase']),))
     
     if len(self.merkle_link['branch']) > 16:
         raise ValueError('merkle branch too long!')
     
     assert not self.hash_link['extra_data'], repr(self.hash_link['extra_data'])
     
     self.share_data = self.share_info['share_data']
     self.max_target = self.share_info['max_bits'].target
     self.target = self.share_info['bits'].target
     self.timestamp = self.share_info['timestamp']
     self.previous_hash = self.share_data['previous_share_hash']
     self.new_script = bitcoin_data.pubkey_hash_to_script2(self.share_data['pubkey_hash'])
     self.desired_version = self.share_data['desired_version']
     self.absheight = self.share_info['absheight']
     self.abswork = self.share_info['abswork']
     
     n = set()
     for share_count, tx_count in self.iter_transaction_hash_refs():
         assert share_count < 110
         if share_count == 0:
             n.add(tx_count)
     assert n == set(range(len(self.share_info['new_transaction_hashes'])))
     
     self.gentx_hash = check_hash_link(
         self.hash_link,
         self.get_ref_hash(net, self.share_info, contents['ref_merkle_link']) + pack.IntType(64).pack(self.contents['last_txout_nonce']) + pack.IntType(32).pack(0),
         self.gentx_before_refhash,
     )
     merkle_root = bitcoin_data.check_merkle_link(self.gentx_hash, self.merkle_link)
     self.header = dict(self.min_header, merkle_root=merkle_root)
     self.pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(self.header))
     #self.hash = self.header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(self.header))
     self.hash = self.header_hash = net.PARENT.BLOCKHASH_FUNC(bitcoin_data.block_header_type.pack(self.header))
     
     if self.target > net.MAX_TARGET:
         from p2pool import p2p
         raise p2p.PeerMisbehavingError('share target invalid')
     
     if self.pow_hash > self.target:
         from p2pool import p2p
         raise p2p.PeerMisbehavingError('share PoW invalid')
     
     self.new_transaction_hashes = self.share_info['new_transaction_hashes']
     
     # XXX eww
     self.time_seen = time.time()
Example 4
def get_blocks2(n):
    height = yield blockchain('q/getblockcount')
    
    res = []
    
    for i in xrange(n):
        x = yield blockchain('block-height/%i?format=json' % (height - i,))
        for block in x['blocks']:
            #print block
            header = dict(
                version=block['ver'],
                previous_block=int(block['prev_block'], 16),
                merkle_root=int(block['mrkl_root'], 16),
                timestamp=block['time'],
                bits=bitcoin_data.FloatingInteger(block['bits']),
                nonce=block['nonce'],
            )
            assert bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)) == int(block['hash'], 16)
            
            
            # there seems to be no way to get the raw transaction
            # from blockchain.info (?format=hex doesn't work for
            # coinbase transctions ): so instead fake it
            
            
            txs = [dict(
                version=tx['ver'],
                tx_ins=[dict(
                    previous_output=None,
                    script='',
                    sequence=0,
                ) for tx_in in tx['inputs']],
                tx_outs=[dict(
                    value=tx_out['value'],
                    script='\x6a' + 'blah'*100 if tx_out['type'] == -1 else
                        p2pool_data.DONATION_SCRIPT if tx_out['addr'] == bitcoin_data.script2_to_address(p2pool_data.DONATION_SCRIPT, net) else
                        bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(tx_out['addr'], net)),
                ) for tx_out in tx['out']],
                lock_time=0,
            ) for tx in block['tx']]
            
            #print txs[0]
            
            # fails because we don't have coinbase script ):
            #assert bitcoin_data.hash256(bitcoin_data.tx_type.pack(txs[0])) == block['tx'][0]['hash']
            
            block2 = dict(header=header, txs=txs)
            
            res.append(dict(
                block=block2,
                height=block['height'],
                gentx_hash=int(block['tx'][0]['hash'], 16),
            ))
    
    defer.returnValue(res)
Example 5
 def generate_transaction(cls, tracker, share_data, block_target, desired_timestamp, desired_target, ref_merkle_link, net):
     previous_share = tracker.shares[share_data['previous_share_hash']] if share_data['previous_share_hash'] is not None else None
     
     height, last = tracker.get_height_and_last(share_data['previous_share_hash'])
     assert height >= net.REAL_CHAIN_LENGTH or last is None
     if height < net.TARGET_LOOKBEHIND:
         pre_target3 = net.MAX_TARGET
     else:
         attempts_per_second = get_pool_attempts_per_second(tracker, share_data['previous_share_hash'], net.TARGET_LOOKBEHIND, min_work=True, integer=True)
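         # choose a target so that, at the measured pool hashrate, one share is expected per SHARE_PERIOD seconds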
         pre_target = 2**256//(net.SHARE_PERIOD*attempts_per_second) - 1 if attempts_per_second else 2**256-1
         pre_target2 = math.clip(pre_target, (previous_share.max_target*9//10, previous_share.max_target*11//10))
         pre_target3 = math.clip(pre_target2, (0, net.MAX_TARGET))
     max_bits = bitcoin_data.FloatingInteger.from_target_upper_bound(pre_target3)
     bits = bitcoin_data.FloatingInteger.from_target_upper_bound(math.clip(desired_target, (pre_target3//10, pre_target3)))
     
     weights, total_weight, donation_weight = tracker.get_cumulative_weights(share_data['previous_share_hash'],
         min(height, net.REAL_CHAIN_LENGTH),
         65535*net.SPREAD*bitcoin_data.target_to_average_attempts(block_target),
     )
     assert total_weight == sum(weights.itervalues()) + donation_weight, (total_weight, sum(weights.itervalues()) + donation_weight)
     
     amounts = dict((script, share_data['subsidy']*(199*weight)//(200*total_weight)) for script, weight in weights.iteritems()) # 99.5% goes according to weights prior to this share
     this_script = bitcoin_data.pubkey_hash_to_script2(share_data['pubkey_hash'])
     amounts[this_script] = amounts.get(this_script, 0) + share_data['subsidy']//200 # 0.5% goes to block finder
     amounts[DONATION_SCRIPT] = amounts.get(DONATION_SCRIPT, 0) + share_data['subsidy'] - sum(amounts.itervalues()) # all that's left over is the donation weight and some extra satoshis due to rounding
     
     if sum(amounts.itervalues()) != share_data['subsidy'] or any(x < 0 for x in amounts.itervalues()):
         raise ValueError()
     
     dests = sorted(amounts.iterkeys(), key=lambda script: (script == DONATION_SCRIPT, amounts[script], script))[-4000:] # block length limit, unlikely to ever be hit
     
     share_info = dict(
         share_data=share_data,
         far_share_hash=None if last is None and height < 99 else tracker.get_nth_parent_hash(share_data['previous_share_hash'], 99),
         max_bits=max_bits,
         bits=bits,
         timestamp=math.clip(desired_timestamp, (
             (previous_share.timestamp + net.SHARE_PERIOD) - (net.SHARE_PERIOD - 1), # = previous_share.timestamp + 1
             (previous_share.timestamp + net.SHARE_PERIOD) + (net.SHARE_PERIOD - 1),
         )) if previous_share is not None else desired_timestamp,
     )
     
     return share_info, dict(
         version=1,
         tx_ins=[dict(
             previous_output=None,
             sequence=None,
             script=share_data['coinbase'].ljust(2, '\x00'),
         )],
         tx_outs=[dict(value=amounts[script], script=script) for script in dests if amounts[script] or script == DONATION_SCRIPT] + [dict(
             value=0,
             script='\x20' + cls.get_ref_hash(net, share_info, ref_merkle_link),
         )],
         lock_time=0,
     )
Example 6
 def __init__(self, net, peer, common, merkle_link, other_txs):
     self.net = net
     self.peer = peer
     self.common = common
     self.min_header = common['min_header']
     self.share_info = common['share_info']
     self.hash_link = common['hash_link']
     self.merkle_link = merkle_link
     self.other_txs = other_txs
     
     if len(self.share_info['share_data']['coinbase']) > 100:
         raise ValueError('''coinbase too large! %i bytes''' % (len(self.share_info['share_data']['coinbase']),))
     
     if len(merkle_link['branch']) > 16:
         raise ValueError('merkle branch too long!')
     
     if p2pool.DEBUG and other_txs is not None and bitcoin_data.calculate_merkle_link([0] + [bitcoin_data.hash256(bitcoin_data.tx_type.pack(x)) for x in other_txs], 0) != merkle_link:
         raise ValueError('merkle_link and other_txs do not match')
     
     assert not self.hash_link['extra_data'], repr(self.hash_link['extra_data'])
     
     self.share_data = self.share_info['share_data']
     self.max_target = self.share_info['max_bits'].target
     self.target = self.share_info['bits'].target
     self.timestamp = self.share_info['timestamp']
     self.previous_hash = self.share_data['previous_share_hash']
     self.new_script = bitcoin_data.pubkey_hash_to_script2(self.share_data['pubkey_hash'])
     self.desired_version = self.share_data['desired_version']
     
     if self.timestamp < net.SWITCH_TIME:
         from p2pool import p2p
         raise p2p.PeerMisbehavingError('peer sent a new-style share with a timestamp before the switch time')
     
     self.gentx_hash = check_hash_link(
         self.hash_link,
         self.get_ref_hash(net, self.share_info, common['ref_merkle_link']) + pack.IntType(32).pack(0),
         self.gentx_before_refhash,
     )
     merkle_root = bitcoin_data.check_merkle_link(self.gentx_hash, merkle_link)
     self.header = dict(self.min_header, merkle_root=merkle_root)
     self.pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(self.header))
     self.hash = self.header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(self.header))
     
     if self.pow_hash > self.target:
         from p2pool import p2p
         raise p2p.PeerMisbehavingError('share PoW invalid')
     
     if other_txs is not None and not self.pow_hash <= self.header['bits'].target:
         raise ValueError('other_txs provided when not a block solution')
     if other_txs is None and self.pow_hash <= self.header['bits'].target:
         raise ValueError('other_txs not provided when a block solution')
     
     # XXX eww
     self.time_seen = time.time()
Example 7
 def test_tx_hash(self):
     assert data.get_txid(dict(
         version=1,
         tx_ins=[dict(
             previous_output=None,
             sequence=None,
             script='70736a0468860e1a0452389500522cfabe6d6d2b2f33cf8f6291b184f1b291d24d82229463fcec239afea0ee34b4bfc622f62401000000000000004d696e656420627920425443204775696c6420ac1eeeed88'.decode('hex'),
         )],
         tx_outs=[dict(
             value=5003880250,
             script=data.pubkey_hash_to_script2(pack.IntType(160).unpack('ca975b00a8c203b8692f5a18d92dc5c2d2ebc57b'.decode('hex'))),
         )],
         lock_time=0,
     )) == 0xb53802b2333e828d6532059f46ecf6b313a42d79f97925e457fbbfda45367e5c
Example 8
block_header = dict(
    version=3,
    previous_block=0xf260c39629d99355c5476d710d46ca0d35b3d962b44054b9ff943fe622,
    merkle_root=
    0x916fb05aed6373d6e5c49626efc0dea5d103038135bffbc86014f5204df2ebe3,
    timestamp=1367995782,
    bits=data.FloatingInteger(0x1e0fffff),
    nonce=58004,
)

DONATION_SCRIPT = '01210241b8aba0994f320a8b438c627dbf31fbdd7dc722dd8418d829d67a9c6e4fd69021036fbd9d0a34a569f10b0431c8aeecf74ad796b99838b7272ef35ded130a794f9b02ae'.decode(
    'hex')
print DONATION_SCRIPT[2:35].encode('hex')
print data.pubkey_to_address(DONATION_SCRIPT[2:35], networks.nets['yacoin'])
print networks.nets['yacoin'].POW_FUNC(
    data.block_header_type.pack(block_header))

print data.pubkey_hash_to_script2(
    data.address_to_pubkey_hash('YJL3vTFn7m82zQRs7XAXcJXnBNNmZdb1Ty',
                                networks.nets['yacoin'])).encode('hex')

donate = '4104ffd03de44a6e11b9917f3a29f9443283d9871c9d743ef30d5eddcd37094b64d1b3d8090496b53256786bf5c82932ec23c3b74d9f05a6f95a8b5529352656664bac'.decode(
    'hex')
#print data.script2_to_address(donate, networks.nets['bitcoin'])
#print len("c8c6a3c0957d53698da14d7a2c176a133e92fc53".decode('hex'))
#print donate[1:-1].encode('hex')
#print data.pubkey_to_script2(donate[1:-1]).encode('hex')
#print donate[3:-2].encode('hex')
#print data.pubkey_hash_to_script2(donate[3:-2]).encode('hex')
#print len('0241b8aba0994f320a8b438c627dbf31fbdd7dc722dd8418d829d67a9c6e4fd690'.decode('hex'))
#print data.pubkey_to_script2('0241b8aba0994f320a8b438c627dbf31fbdd7dc722dd8418d829d67a9c6e4fd690'
#036fbd9d0a34a569f10b0431c8aeecf74ad796b99838b7272ef35ded130a794f9b
print data.base58_encode(
    'cac8509b8f959d14253e934585c52d04bf4ae21d28e62414a0c71c9fb80a5713'.decode(
        'hex'))
Example 9
@defer.inlineCallbacks
def main(args, net, datadir_path, merged_urls, worker_endpoint):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print
        
        @defer.inlineCallbacks
        def connect_p2p():
            # connect to bitcoind over bitcoin-p2p
            print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
            factory = bitcoin_p2p.ClientFactory(net.PARENT)
            reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
            def long():
                print '''    ...taking a while. Common reasons for this include all of bitcoind's connection slots being used...'''
            long_dc = reactor.callLater(5, long)
            yield factory.getProtocol() # waits until handshake is successful
            if not long_dc.called: long_dc.cancel()
            print '    ...success!'
            print
            defer.returnValue(factory)
        
        if args.testnet: # establish p2p connection first if testnet so bitcoind can work without connections
            factory = yield connect_p2p()
        
        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.HTTPProxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30)
        print base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)
        yield helper.check(bitcoind, net)
        temp_work = yield helper.getwork(bitcoind)
        
        bitcoind_getinfo_var = variable.Variable(None)
        @defer.inlineCallbacks
        def poll_warnings():
            bitcoind_getinfo_var.set((yield deferral.retry('Error while calling getinfo:')(bitcoind.rpc_getnetworkinfo)()))
        yield poll_warnings()
        deferral.RobustLoopingCall(poll_warnings).start(20*60)
        
        print '    ...success!'
        print '    Current block hash  : %x' % (temp_work['previous_block'],)
        print '    Current block height: %i' % (temp_work['height'] - 1,)
        print '    Current block bits  : %s' % (temp_work['bits'],)
        print

        if not args.testnet:
            factory = yield connect_p2p()
        
        print 'Determining payout address...'
        pubkeys = keypool()
        if args.pubkey_hash is None and args.address != 'dynamic':
            address_path = os.path.join(datadir_path, 'cached_payout_address')
            
            if os.path.exists(address_path):
                with open(address_path, 'rb') as f:
                    address = f.read().strip('\r\n')
                print '    Loaded cached address: %s...' % (address,)
            else:
                address = None
            
            if address is not None:
                res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
                if not res['isvalid'] or not res['ismine']:
                    print '    Cached address is either invalid or not controlled by local bitcoind!'
                    address = None
            
            if address is None:
                print '    Getting payout address from bitcoind...'
                address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
            
            with open(address_path, 'wb') as f:
                f.write(address)
            
            my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
            print '    ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
            print
            pubkeys.addkey(my_pubkey_hash)
        elif args.address != 'dynamic':
            my_pubkey_hash = args.pubkey_hash
            print '    ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
            print
            pubkeys.addkey(my_pubkey_hash)
        else:
            print '    Entering dynamic address mode.'

            if args.numaddresses < 2:
                print ' ERROR: Can not use fewer than 2 addresses in dynamic mode. Resetting to 2.'
                args.numaddresses = 2
            for i in range(args.numaddresses):
                address = yield deferral.retry('Error getting a dynamic address from bitcoind:', 5)(lambda: bitcoind.rpc_getnewaddress('p2pool'))()
                new_pubkey = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
                pubkeys.addkey(new_pubkey)

            pubkeys.updatestamp(time.time())

            my_pubkey_hash = pubkeys.keys[0]

            for i in range(len(pubkeys.keys)):
                print '    ...payout %d: %s' % (i, bitcoin_data.pubkey_hash_to_address(pubkeys.keys[i], net.PARENT),)
        
        print "Loading shares..."
        shares = {}
        known_verified = set()
        def share_cb(share):
            share.time_seen = 0 # XXX
            shares[share.hash] = share
            if len(shares) % 1000 == 0 and shares:
                print "    %i" % (len(shares),)
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net, share_cb, known_verified.add)
        print "    ...done loading %i shares (%i verified)!" % (len(shares), len(known_verified))
        print
        
        
        print 'Initializing work...'
        
        node = p2pool_node.Node(factory, bitcoind, shares.values(), known_verified, net)
        yield node.start()
        
        for share_hash in shares:
            if share_hash not in node.tracker.items:
                ss.forget_share(share_hash)
        for share_hash in known_verified:
            if share_hash not in node.tracker.verified.items:
                ss.forget_verified_share(share_hash)
        node.tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        node.tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        
        def save_shares():
            for share in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 2*net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in node.tracker.verified.items:
                    ss.add_verified_hash(share.hash)
        deferral.RobustLoopingCall(save_shares).start(60)

        if len(shares) > net.CHAIN_LENGTH:
            best_share = shares[node.best_share_var.value]
            previous_share = shares[best_share.share_data['previous_share_hash']]
            counts = p2pool_data.get_desired_version_counts(node.tracker, node.tracker.get_nth_parent_hash(previous_share.hash, net.CHAIN_LENGTH*9//10), net.CHAIN_LENGTH//10)
            p2pool_data.update_min_protocol_version(counts, best_share)
        
        print '    ...success!'
        print
        
        
        print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
        
        @defer.inlineCallbacks
        def parse(host):
            port = net.P2P_PORT
            if ':' in host:
                host, port_str = host.split(':')
                port = int(port_str)
            defer.returnValue(((yield reactor.resolve(host)), port))
        
        addrs = {}
        if os.path.exists(os.path.join(datadir_path, 'addrs')):
            try:
                with open(os.path.join(datadir_path, 'addrs'), 'rb') as f:
                    addrs.update(dict((tuple(k), v) for k, v in json.loads(f.read())))
            except:
                print >>sys.stderr, 'error parsing addrs'
        for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
            try:
                addr = yield addr_df
                if addr not in addrs:
                    addrs[addr] = (0, time.time(), time.time())
            except:
                log.err()
        
        connect_addrs = set()
        for addr_df in map(parse, args.p2pool_nodes):
            try:
                connect_addrs.add((yield addr_df))
            except:
                log.err()
        
        node.p2p_node = p2pool_node.P2PNode(node,
            port=args.p2pool_port,
            max_incoming_conns=args.p2pool_conns,
            addr_store=addrs,
            connect_addrs=connect_addrs,
            desired_outgoing_conns=args.p2pool_outgoing_conns,
            advertise_ip=args.advertise_ip,
            external_ip=args.p2pool_external_ip,
        )
        node.p2p_node.start()
        
        def save_addrs():
            with open(os.path.join(datadir_path, 'addrs'), 'wb') as f:
                f.write(json.dumps(node.p2p_node.addr_store.items()))
        deferral.RobustLoopingCall(save_addrs).start(60)
        
        print '    ...success!'
        print
        
        if args.upnp:
            @defer.inlineCallbacks
            def upnp_thread():
                while True:
                    try:
                        is_lan, lan_ip = yield ipdiscover.get_local_ip()
                        if is_lan:
                            pm = yield portmapper.get_port_mapper()
                            yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
                    except defer.TimeoutError:
                        pass
                    except:
                        if p2pool.DEBUG:
                            log.err(None, 'UPnP error:')
                    yield deferral.sleep(random.expovariate(1/120))
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
        
        wb = work.WorkerBridge(node, my_pubkey_hash, args.donation_percentage, merged_urls, args.worker_fee, args, pubkeys, bitcoind)
        web_root = web.get_web_root(wb, datadir_path, bitcoind_getinfo_var, static_dir=args.web_static)
        caching_wb = worker_interface.CachingWorkerBridge(wb)
        worker_interface.WorkerInterface(caching_wb).attach_to(web_root, get_handler=lambda request: request.redirect('/static/'))
        web_serverfactory = server.Site(web_root)
        
        serverfactory = switchprotocol.FirstByteSwitchFactory({'{': stratum.StratumServerFactory(caching_wb)}, web_serverfactory)
        deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], serverfactory, interface=worker_endpoint[0])
        
        with open(os.path.join(datadir_path, 'ready_flag'), 'wb') as f:
            pass
        
        print '    ...success!'
        print

        print args.reserve_address
        print args.reserve_percentage
        global_var.set_value('script', pubkey_hash_to_script2(address_to_pubkey_hash(global_var.get_value('reserve_address'), wb.net)))
        
        # done!
        print 'Started successfully!'
        print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (worker_endpoint[1],)
        args.donation_percentage = 0
        # if args.donation_percentage > 1.1:
        #     print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (args.donation_percentage,)
        # elif args.donation_percentage < .9:
        #     print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (args.donation_percentage,)
        # else:
        #     print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (args.donation_percentage,)
        #     print 'You can increase this amount with --give-author argument! (or decrease it, if you must)'
        print "srcchain donation %.f%%" % args.donation_percentage


        if hasattr(signal, 'SIGALRM'):
            signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
            ))
            signal.siginterrupt(signal.SIGALRM, False)
            deferral.RobustLoopingCall(signal.alarm, 30).start(1)
        
        if args.irc_announce:
            from twisted.words.protocols import irc
            class IRCClient(irc.IRCClient):
                nickname = 'p2pool%02i' % (random.randrange(100),)
                channel = net.ANNOUNCE_CHANNEL
                def lineReceived(self, line):
                    if p2pool.DEBUG:
                        print repr(line)
                    irc.IRCClient.lineReceived(self, line)
                def signedOn(self):
                    self.in_channel = False
                    irc.IRCClient.signedOn(self)
                    self.factory.resetDelay()
                    self.join(self.channel)
                    @defer.inlineCallbacks
                    def new_share(share):
                        if not self.in_channel:
                            return
                        if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60:
                            yield deferral.sleep(random.expovariate(1/60))
                            message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                            if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages):
                                self.say(self.channel, message)
                                self._remember_message(message)
                    self.watch_id = node.tracker.verified.added.watch(new_share)
                    self.recent_messages = []
                def joined(self, channel):
                    self.in_channel = True
                def left(self, channel):
                    self.in_channel = False
                def _remember_message(self, message):
                    self.recent_messages.append(message)
                    while len(self.recent_messages) > 100:
                        self.recent_messages.pop(0)
                def privmsg(self, user, channel, message):
                    if channel == self.channel:
                        self._remember_message(message)
                def connectionLost(self, reason):
                    node.tracker.verified.added.unwatch(self.watch_id)
                    print 'IRC connection lost:', reason.getErrorMessage()
            class IRCClientFactory(protocol.ReconnectingClientFactory):
                protocol = IRCClient
            reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory(), bindAddress=(worker_endpoint[0], 0))
        
        @defer.inlineCallbacks
        def status_thread():
            last_str = None
            last_time = 0
            while True:
                yield deferral.sleep(3)
                try:
                    height = node.tracker.get_height(node.best_share_var.value)
                    this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
                        height,
                        len(node.tracker.verified.items),
                        len(node.tracker.items),
                        len(node.p2p_node.peers),
                        sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
                    ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
                    
                    datums, dt = wb.local_rate_monitor.get_datums_in_last()
                    my_att_s = sum(datum['work']/dt for datum in datums)
                    my_shares_per_s = sum(datum['work']/dt/bitcoin_data.target_to_average_attempts(datum['share_target']) for datum in datums)
                    this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
                        math.format(int(my_att_s)),
                        math.format_dt(dt),
                        math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
                        math.format_dt(1/my_shares_per_s) if my_shares_per_s else '???',
                    )
                    
                    if height > 2:
                        (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
                        stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, min(60*60//net.SHARE_PERIOD, height))
                        real_att_s = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop)
                        
                        paystr = ''
                        paytot = 0.0
                        for i in range(len(pubkeys.keys)):
                            curtot = node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(pubkeys.keys[i]), 0)
                            paytot += curtot*1e-8
                            paystr += "(%.4f)" % (curtot*1e-8,)
                        paystr += "=%.4f" % (paytot,)
                        this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %s %s' % (
                            shares, stale_orphan_shares, stale_doa_shares,
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
                            paystr, net.PARENT.SYMBOL,
                        )
                        this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
                            math.format(int(real_att_s)),
                            100*stale_prop,
                            math.format_dt(2**256 / node.bitcoind_work.value['bits'].target / real_att_s),
                        )
                        
                        for warning in p2pool_data.get_warnings(node.tracker, node.best_share_var.value, net, bitcoind_getinfo_var.value, node.bitcoind_work.value):
                            print >>sys.stderr, '#'*40
                            print >>sys.stderr, '>>> Warning: ' + warning
                            print >>sys.stderr, '#'*40
                        
                        if gc.garbage:
                            print '%i pieces of uncollectable cyclic garbage! Types: %r' % (len(gc.garbage), map(type, gc.garbage))
                    
                    if this_str != last_str or time.time() > last_time + 15:
                        print this_str
                        last_str = this_str
                        last_time = time.time()
                except:
                    log.err()
        status_thread()
    except:
        reactor.stop()
        log.err(None, 'Fatal error:')
Example 10
def get_payout_script(request, net):
    try:
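        # the worker's HTTP Basic auth username (the part before ':') is treated as a payout address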
        user = base64.b64decode(request.getHeader('Authorization').split(' ', 1)[1]).split(':')[0]
        return bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(user, net))
    except: # XXX blah
        return None
Example 11
block_header = dict(
version=3,
previous_block=0xf260c39629d99355c5476d710d46ca0d35b3d962b44054b9ff943fe622,
merkle_root=0x916fb05aed6373d6e5c49626efc0dea5d103038135bffbc86014f5204df2ebe3,
timestamp=1367995782,
bits=data.FloatingInteger(0x1e0fffff),
nonce=58004,
)

DONATION_SCRIPT = '01210241b8aba0994f320a8b438c627dbf31fbdd7dc722dd8418d829d67a9c6e4fd69021036fbd9d0a34a569f10b0431c8aeecf74ad796b99838b7272ef35ded130a794f9b02ae'.decode('hex')
print DONATION_SCRIPT[2:35].encode('hex')
print data.pubkey_to_address(DONATION_SCRIPT[2:35], networks.nets['cachecoin'])
print networks.nets['cachecoin'].POW_FUNC(data.block_header_type.pack(block_header)) 

print data.pubkey_hash_to_script2(data.address_to_pubkey_hash('CP8qRQAESbU6CqJvsjh2tCSZWLQnhQd3n3', networks.nets['cachecoin'])).encode('hex')

donate = '4104ffd03de44a6e11b9917f3a29f9443283d9871c9d743ef30d5eddcd37094b64d1b3d8090496b53256786bf5c82932ec23c3b74d9f05a6f95a8b5529352656664bac'.decode('hex')
#print data.script2_to_address(donate, networks.nets['bitcoin'])
#print len("c8c6a3c0957d53698da14d7a2c176a133e92fc53".decode('hex'))
#print donate[1:-1].encode('hex')
#print data.pubkey_to_script2(donate[1:-1]).encode('hex')
#print donate[3:-2].encode('hex')
#print data.pubkey_hash_to_script2(donate[3:-2]).encode('hex')
#print len('0241b8aba0994f320a8b438c627dbf31fbdd7dc722dd8418d829d67a9c6e4fd690'.decode('hex'))
#print data.pubkey_to_script2('0241b8aba0994f320a8b438c627dbf31fbdd7dc722dd8418d829d67a9c6e4fd690'
#036fbd9d0a34a569f10b0431c8aeecf74ad796b99838b7272ef35ded130a794f9b
print data.base58_encode('cac8509b8f959d14253e934585c52d04bf4ae21d28e62414a0c71c9fb80a5713'.decode('hex'))
text = sha256.sha256('cac8509f4ae21d28e62414a0c71c9fb80a5713sdfsdfsdfsdfsfsdssssssssss')
print text.state
print text.buf
Example 12
block_header = dict(
version=3,
previous_block=0xf260c39629d99355c5476d710d46ca0d35b3d962b44054b9ff943fe622,
merkle_root=0x916fb05aed6373d6e5c49626efc0dea5d103038135bffbc86014f5204df2ebe3,
timestamp=1367995782,
bits=data.FloatingInteger(0x1e0fffff),
nonce=58004,
)

DONATION_SCRIPT = '01210241b8aba0994f320a8b438c627dbf31fbdd7dc722dd8418d829d67a9c6e4fd69021036fbd9d0a34a569f10b0431c8aeecf74ad796b99838b7272ef35ded130a794f9b02ae'.decode('hex')
print DONATION_SCRIPT[2:35].encode('hex')
print data.pubkey_to_address(DONATION_SCRIPT[2:35], networks.nets['yacoin'])
print networks.nets['yacoin'].POW_FUNC(data.block_header_type.pack(block_header)) 

print data.pubkey_hash_to_script2(data.address_to_pubkey_hash('YJL3vTFn7m82zQRs7XAXcJXnBNNmZdb1Ty', networks.nets['yacoin'])).encode('hex')

donate = '4104ffd03de44a6e11b9917f3a29f9443283d9871c9d743ef30d5eddcd37094b64d1b3d8090496b53256786bf5c82932ec23c3b74d9f05a6f95a8b5529352656664bac'.decode('hex')
#print data.script2_to_address(donate, networks.nets['bitcoin'])
#print len("c8c6a3c0957d53698da14d7a2c176a133e92fc53".decode('hex'))
#print donate[1:-1].encode('hex')
#print data.pubkey_to_script2(donate[1:-1]).encode('hex')
#print donate[3:-2].encode('hex')
#print data.pubkey_hash_to_script2(donate[3:-2]).encode('hex')
#print len('0241b8aba0994f320a8b438c627dbf31fbdd7dc722dd8418d829d67a9c6e4fd690'.decode('hex'))
#print data.pubkey_to_script2('0241b8aba0994f320a8b438c627dbf31fbdd7dc722dd8418d829d67a9c6e4fd690'
#036fbd9d0a34a569f10b0431c8aeecf74ad796b99838b7272ef35ded130a794f9b
print data.base58_encode('cac8509b8f959d14253e934585c52d04bf4ae21d28e62414a0c71c9fb80a5713'.decode('hex'))
text = sha256.sha256('cac8509f4ae21d28e62414a0c71c9fb80a5713sdfsdfsdfsdfsfsdssssssssss')
print text.state
print text.buf
Example 13
        return NewShare(
            net, peer_addr,
            NewShare.get_dynamic_types(net)['share_type'].unpack(
                share['contents']))
    else:
        raise ValueError('unknown share type: %r' % (share['type'], ))


def is_segwit_activated(version, net):
    assert not (version is None or net is None)
    segwit_activation_version = getattr(net, 'SEGWIT_ACTIVATION_VERSION', 0)
    return version >= segwit_activation_version and segwit_activation_version > 0


# DONATION_SCRIPT = '4104ffd03de44a6e11b9917f3a29f9443283d9871c9d743ef30d5eddcd37094b64d1b3d8090496b53256786bf5c82932ec23c3b74d9f05a6f95a8b5529352656664bac'.decode('hex')
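# the decimal argument below is a 160-bit pubkey hash; pubkey_hash_to_script2 wraps it in the standard P2PKH scriptPubKey used for the donation output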
DONATION_SCRIPT = pubkey_hash_to_script2(
    974143861276139069219601755531477429285430505615)


class BaseShare(object):
    VERSION = 0
    VOTING_VERSION = 0
    SUCCESSOR = None

    MAX_BLOCK_WEIGHT = 4000000
    MAX_NEW_TXS_SIZE = 50000

    small_block_header_type = pack.ComposedType([
        ('version', pack.VarIntType()),
        ('previous_block', pack.PossiblyNoneType(0, pack.IntType(256))),
        ('timestamp', pack.IntType(32)),
        ('bits', bitcoin_data.FloatingIntegerType()),
Example 14
    def __init__(self, net, peer, common, merkle_link, other_txs):
        self.net = net
        self.peer = peer
        self.common = common
        self.min_header = common['min_header']
        self.share_info = common['share_info']
        self.hash_link = common['hash_link']
        self.merkle_link = merkle_link
        self.other_txs = other_txs

        if len(self.share_info['share_data']['coinbase']) > 100:
            raise ValueError(
                '''coinbase too large! %i bytes''' %
                (len(self.share_info['share_data']['coinbase']), ))

        if len(merkle_link['branch']) > 16:
            raise ValueError('merkle branch too long!')

        if p2pool.DEBUG and other_txs is not None and bitcoin_data.calculate_merkle_link(
            [0] + [
                bitcoin_data.hash256(bitcoin_data.tx_type.pack(x))
                for x in other_txs
            ], 0) != merkle_link:
            raise ValueError('merkle_link and other_txs do not match')

        assert not self.hash_link['extra_data'], repr(
            self.hash_link['extra_data'])

        self.share_data = self.share_info['share_data']
        self.max_target = self.share_info['max_bits'].target
        self.target = self.share_info['bits'].target
        self.timestamp = self.share_info['timestamp']
        self.previous_hash = self.share_data['previous_share_hash']
        self.new_script = bitcoin_data.pubkey_hash_to_script2(
            self.share_data['pubkey_hash'])
        self.desired_version = self.share_data['desired_version']

        self.gentx_hash = check_hash_link(
            self.hash_link,
            self.get_ref_hash(net, self.share_info, common['ref_merkle_link'])
            + pack.IntType(32).pack(0),
            self.gentx_before_refhash,
        )
        merkle_root = bitcoin_data.check_merkle_link(self.gentx_hash,
                                                     merkle_link)
        self.header = dict(self.min_header, merkle_root=merkle_root)
        self.pow_hash = net.PARENT.POW_FUNC(
            bitcoin_data.block_header_type.pack(self.header))
        self.hash = self.header_hash = bitcoin_data.hash256(
            bitcoin_data.block_header_type.pack(self.header))

        if self.pow_hash > self.target:
            from p2pool import p2p
            raise p2p.PeerMisbehavingError('share PoW invalid')

        if other_txs is not None and not self.pow_hash <= self.header[
                'bits'].target:
            raise ValueError('other_txs provided when not a block solution')
        if other_txs is None and self.pow_hash <= self.header['bits'].target:
            raise ValueError('other_txs not provided when a block solution')

        # XXX eww
        self.time_seen = time.time()
Example 15
share_type = pack.ComposedType([
    ('type', pack.VarIntType()),
    ('contents', pack.VarStrType()),
])

def load_share(share, net, peer_addr):
    assert peer_addr is None or isinstance(peer_addr, tuple)
    if share['type'] < Share.VERSION:
        from p2pool import p2p
        raise p2p.PeerMisbehavingError('sent an obsolete share')
    elif share['type'] == Share.VERSION:
        return Share(net, peer_addr, Share.share_type.unpack(share['contents']))
    else:
        raise ValueError('unknown share type: %r' % (share['type'],))

DONATION_SCRIPT = bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash('BJG379oBv9cn94cdz85E9K5ejRhrHcvKgs',networks.nets['buffalocoin']))

class Share(object):
    VERSION = 13
    VOTING_VERSION = 13
    SUCCESSOR = None
    
    small_block_header_type = pack.ComposedType([
        ('version', pack.VarIntType()),
        ('previous_block', pack.PossiblyNoneType(0, pack.IntType(256))),
        ('timestamp', pack.IntType(32)),
        ('bits', bitcoin_data.FloatingIntegerType()),
        ('nonce', pack.IntType(32)),
    ])
    
    share_info_type = pack.ComposedType([
Example 16
def main(address, network_name):
    pubkeyHash = bitcoin_data.address_to_pubkey_hash(
        address, networks.nets[network_name])
    P2PKH = bitcoin_data.pubkey_hash_to_script2(pubkeyHash).encode('hex')

    print "scriptPubKey P2PKH hex: " + P2PKH
Example 17
    def __init__(self, net, peer_addr, contents):
        dynamic_types = self.get_dynamic_types(net)
        self.share_info_type = dynamic_types['share_info_type']
        self.share_type = dynamic_types['share_type']
        self.ref_type = dynamic_types['ref_type']

        self.net = net
        self.peer_addr = peer_addr
        self.contents = contents
        
        self.min_header = contents['min_header']
        self.share_info = contents['share_info']
        self.hash_link = contents['hash_link']
        self.merkle_link = contents['merkle_link']

        # save some memory if we can
        txrefs = self.share_info['transaction_hash_refs']
        if txrefs and max(txrefs) < 2**16:
            self.share_info['transaction_hash_refs'] = array.array('H', txrefs)
        elif txrefs and max(txrefs) < 2**32: # in case we see blocks with more than 65536 tx in the future
            self.share_info['transaction_hash_refs'] = array.array('L', txrefs)
        
        segwit_activated = is_segwit_activated(self.VERSION, net)
        
        if not (2 <= len(self.share_info['share_data']['coinbase']) <= 100):
            raise ValueError('''bad coinbase size! %i bytes''' % (len(self.share_info['share_data']['coinbase']),))
        
        if len(self.merkle_link['branch']) > 16 or (segwit_activated and len(self.share_info['segwit_data']['txid_merkle_link']['branch']) > 16):
            raise ValueError('merkle branch too long!')
        
        assert not self.hash_link['extra_data'], repr(self.hash_link['extra_data'])
        
        self.share_data = self.share_info['share_data']
        self.max_target = self.share_info['max_bits'].target
        self.target = self.share_info['bits'].target
        self.timestamp = self.share_info['timestamp']
        self.previous_hash = self.share_data['previous_share_hash']
        self.new_script = bitcoin_data.pubkey_hash_to_script2(self.share_data['pubkey_hash'])
        self.desired_version = self.share_data['desired_version']
        self.absheight = self.share_info['absheight']
        self.abswork = self.share_info['abswork']
        if net.NAME == 'bitcoin' and self.absheight > 3927800 and self.desired_version == 16:
            raise ValueError("This is not a hardfork-supporting share!")
        
        n = set()
        for share_count, tx_count in self.iter_transaction_hash_refs():
            assert share_count < 110
            if share_count == 0:
                n.add(tx_count)
        assert n == set(range(len(self.share_info['new_transaction_hashes'])))
        
        self.gentx_hash = check_hash_link(
            self.hash_link,
            self.get_ref_hash(net, self.share_info, contents['ref_merkle_link']) + pack.IntType(64).pack(self.contents['last_txout_nonce']) + pack.IntType(32).pack(0),
            self.gentx_before_refhash,
        )
        merkle_root = bitcoin_data.check_merkle_link(self.gentx_hash, self.share_info['segwit_data']['txid_merkle_link'] if segwit_activated else self.merkle_link)
        self.header = dict(self.min_header, merkle_root=merkle_root)
        self.pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(self.header))
        self.hash = self.header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(self.header))
        
        if self.target > net.MAX_TARGET:
            from p2pool import p2p
            raise p2p.PeerMisbehavingError('share target invalid')
        
        if self.pow_hash > self.target:
            from p2pool import p2p
            raise p2p.PeerMisbehavingError('share PoW invalid')
        
        self.new_transaction_hashes = self.share_info['new_transaction_hashes']
        
        # XXX eww
        self.time_seen = time.time()
Example 18
    def generate_transaction(cls, tracker, share_data, block_target, desired_timestamp, desired_target, ref_merkle_link, desired_other_transaction_hashes_and_fees, net, known_txs=None, last_txout_nonce=0, base_subsidy=None, segwit_data=None):
        t0 = time.time()
        previous_share = tracker.items[share_data['previous_share_hash']] if share_data['previous_share_hash'] is not None else None
        
        height, last = tracker.get_height_and_last(share_data['previous_share_hash'])
        assert height >= net.REAL_CHAIN_LENGTH or last is None
        if height < net.TARGET_LOOKBEHIND:
            pre_target3 = net.MAX_TARGET
        else:
            attempts_per_second = get_pool_attempts_per_second(tracker, share_data['previous_share_hash'], net.TARGET_LOOKBEHIND, min_work=True, integer=True)
            pre_target = 2**256//(net.SHARE_PERIOD*attempts_per_second) - 1 if attempts_per_second else 2**256-1
            pre_target2 = math.clip(pre_target, (previous_share.max_target*9//10, previous_share.max_target*11//10))
            pre_target3 = math.clip(pre_target2, (net.MIN_TARGET, net.MAX_TARGET))
        max_bits = bitcoin_data.FloatingInteger.from_target_upper_bound(pre_target3)
        bits = bitcoin_data.FloatingInteger.from_target_upper_bound(math.clip(desired_target, (pre_target3//30, pre_target3)))
        
        new_transaction_hashes = []
        new_transaction_size = 0 # including witnesses
        all_transaction_stripped_size = 0 # stripped size
        all_transaction_real_size = 0 # including witnesses, for statistics
        new_transaction_weight = 0
        all_transaction_weight = 0
        transaction_hash_refs = []
        other_transaction_hashes = []
        t1 = time.time()
        past_shares = list(tracker.get_chain(share_data['previous_share_hash'], min(height, 100)))
        tx_hash_to_this = {}
        for i, share in enumerate(past_shares):
            for j, tx_hash in enumerate(share.new_transaction_hashes):
                if tx_hash not in tx_hash_to_this:
                    tx_hash_to_this[tx_hash] = [1+i, j] # share_count, tx_count
        t2 = time.time()
        for tx_hash, fee in desired_other_transaction_hashes_and_fees:
            if known_txs is not None:
                this_stripped_size = bitcoin_data.tx_id_type.packed_size(known_txs[tx_hash])
                this_real_size     = bitcoin_data.tx_type.packed_size(known_txs[tx_hash])
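                # BIP 141 weight = 3 * stripped (non-witness) size + full serialized size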
                this_weight        = this_real_size + 3*this_stripped_size
            else: # we're just verifying someone else's share. We'll calculate sizes in should_punish_reason()
                this_stripped_size = 0
                this_real_size = 0
                this_weight = 0

            if all_transaction_stripped_size + this_stripped_size + 80 + cls.gentx_size +  500 > net.BLOCK_MAX_SIZE:
                break
            if all_transaction_weight + this_weight + 4*80 + cls.gentx_weight + 2000 > net.BLOCK_MAX_WEIGHT:
                break

            if tx_hash in tx_hash_to_this:
                this = tx_hash_to_this[tx_hash]
                if known_txs is not None:
                    all_transaction_stripped_size += this_stripped_size
                    all_transaction_real_size += this_real_size
                    all_transaction_weight += this_weight
            else:
                if known_txs is not None:
                    new_transaction_size += this_real_size
                    all_transaction_stripped_size += this_stripped_size
                    all_transaction_real_size += this_real_size
                    new_transaction_weight += this_weight
                    all_transaction_weight += this_weight
                new_transaction_hashes.append(tx_hash)
                this = [0, len(new_transaction_hashes)-1]
            transaction_hash_refs.extend(this)
            other_transaction_hashes.append(tx_hash)

        t3 = time.time()
        if transaction_hash_refs and max(transaction_hash_refs) < 2**16:
            transaction_hash_refs = array.array('H', transaction_hash_refs)
        elif transaction_hash_refs and max(transaction_hash_refs) < 2**32: # in case we see blocks with more than 65536 tx
            transaction_hash_refs = array.array('L', transaction_hash_refs)
        t4 = time.time()

        if all_transaction_stripped_size:
            print "Generating a share with %i bytes, %i WU (new: %i B, %i WU) in %i tx (%i new), plus est gentx of %i bytes/%i WU" % (
                all_transaction_real_size,
                all_transaction_weight,
                new_transaction_size,
                new_transaction_weight,
                len(other_transaction_hashes),
                len(new_transaction_hashes),
                cls.gentx_size,
                cls.gentx_weight)
            print "Total block stripped size=%i B, full size=%i B,  weight: %i WU" % (
                80+all_transaction_stripped_size+cls.gentx_size, 
                80+all_transaction_real_size+cls.gentx_size, 
                3*80+all_transaction_weight+cls.gentx_weight)

        included_transactions = set(other_transaction_hashes)
        removed_fees = [fee for tx_hash, fee in desired_other_transaction_hashes_and_fees if tx_hash not in included_transactions]
        definite_fees = sum(0 if fee is None else fee for tx_hash, fee in desired_other_transaction_hashes_and_fees if tx_hash in included_transactions)
        if None not in removed_fees:
            share_data = dict(share_data, subsidy=share_data['subsidy'] - sum(removed_fees))
        else:
            assert base_subsidy is not None
            share_data = dict(share_data, subsidy=base_subsidy + definite_fees)
        
        weights, total_weight, donation_weight = tracker.get_cumulative_weights(previous_share.share_data['previous_share_hash'] if previous_share is not None else None,
            max(0, min(height, net.REAL_CHAIN_LENGTH) - 1),
            65535*net.SPREAD*bitcoin_data.target_to_average_attempts(block_target),
        )
        assert total_weight == sum(weights.itervalues()) + donation_weight, (total_weight, sum(weights.itervalues()) + donation_weight)
        
        amounts = dict((script, share_data['subsidy']*(199*weight)//(200*total_weight)) for script, weight in weights.iteritems()) # 99.5% goes according to weights prior to this share
        this_script = bitcoin_data.pubkey_hash_to_script2(share_data['pubkey_hash'])
        amounts[this_script] = amounts.get(this_script, 0) + share_data['subsidy']//200 # 0.5% goes to block finder
        amounts[DONATION_SCRIPT] = amounts.get(DONATION_SCRIPT, 0) + share_data['subsidy'] - sum(amounts.itervalues()) # all that's left over is the donation weight and some extra satoshis due to rounding
        
        if sum(amounts.itervalues()) != share_data['subsidy'] or any(x < 0 for x in amounts.itervalues()):
            raise ValueError()
        
        dests = sorted(amounts.iterkeys(), key=lambda script: (script == DONATION_SCRIPT, amounts[script], script))[-4000:] # block length limit, unlikely to ever be hit

        segwit_activated = is_segwit_activated(cls.VERSION, net)
        if segwit_data is None and known_txs is None:
            segwit_activated = False
        if not(segwit_activated or known_txs is None) and any(bitcoin_data.is_segwit_tx(known_txs[h]) for h in other_transaction_hashes):
            raise ValueError('segwit transaction included before activation')
        if segwit_activated and known_txs is not None:
            share_txs = [(known_txs[h], bitcoin_data.get_txid(known_txs[h]), h) for h in other_transaction_hashes]
            segwit_data = dict(
                txid_merkle_link=bitcoin_data.calculate_merkle_link([None] + [tx[1] for tx in share_txs], 0),
                wtxid_merkle_root=bitcoin_data.merkle_hash([0] + [bitcoin_data.get_wtxid(tx[0], tx[1], tx[2]) for tx in share_txs]),
            )
        if segwit_activated and segwit_data is not None:
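            # 32-byte witness reserved value; BIP141 only requires that it match the coinbase
            # input's witness stack item, so any fixed 32-byte string works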
            witness_reserved_value_str = '[P2Pool]'*4
            witness_reserved_value = pack.IntType(256).unpack(witness_reserved_value_str)
            witness_commitment_hash = bitcoin_data.get_witness_commitment_hash(segwit_data['wtxid_merkle_root'], witness_reserved_value)

        share_info = dict(
            share_data=share_data,
            far_share_hash=None if last is None and height < 99 else tracker.get_nth_parent_hash(share_data['previous_share_hash'], 99),
            max_bits=max_bits,
            bits=bits,

            timestamp=(math.clip(desired_timestamp, (
                        (previous_share.timestamp + net.SHARE_PERIOD) - (net.SHARE_PERIOD - 1), # = previous_share.timestamp + 1
                        (previous_share.timestamp + net.SHARE_PERIOD) + (net.SHARE_PERIOD - 1),)) if previous_share is not None else desired_timestamp
                      ) if cls.VERSION < 32 else
                      max(desired_timestamp, (previous_share.timestamp + 1)) if previous_share is not None else desired_timestamp,
            new_transaction_hashes=new_transaction_hashes,
            transaction_hash_refs=transaction_hash_refs,
            absheight=((previous_share.absheight if previous_share is not None else 0) + 1) % 2**32,
            abswork=((previous_share.abswork if previous_share is not None else 0) + bitcoin_data.target_to_average_attempts(bits.target)) % 2**128,
        )

        if previous_share != None and desired_timestamp > previous_share.timestamp + 180:
            print "Warning: Previous share's timestamp is %i seconds old." % int(desired_timestamp - previous_share.timestamp)
            print "Make sure your system clock is accurate, and ensure that you're connected to decent peers."
            print "If your clock is more than 300 seconds behind, it can result in orphaned shares."
            print "(It's also possible that this share is just taking a long time to mine.)"
        if previous_share != None and previous_share.timestamp > int(time.time()) + 3:
            print "WARNING! Previous share's timestamp is %i seconds in the future. This is not normal." % \
                   int(previous_share.timestamp - (int(time.time())))
            print "Make sure your system clock is accurate. Errors beyond 300 sec result in orphaned shares."

        if segwit_activated:
            share_info['segwit_data'] = segwit_data
        
        gentx = dict(
            version=1,
            tx_ins=[dict(
                previous_output=None,
                sequence=None,
                script=share_data['coinbase'],
            )],
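            # segwit-only first output: BIP141 witness commitment -- OP_RETURN (0x6a), push of 36 bytes (0x24),
            # the 0xaa21a9ed commitment header, then the 32-byte commitment hash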
            tx_outs=([dict(value=0, script='\x6a\x24\xaa\x21\xa9\xed' + pack.IntType(256).pack(witness_commitment_hash))] if segwit_activated else []) +
                [dict(value=amounts[script], script=script) for script in dests if amounts[script] or script == DONATION_SCRIPT] +
                [dict(value=0, script='\x6a\x28' + cls.get_ref_hash(net, share_info, ref_merkle_link) + pack.IntType(64).pack(last_txout_nonce))],
            lock_time=0,
        )
        if segwit_activated:
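            # BIP144 extended serialization: marker byte 0x00, flag byte 0x01, and a witness stack
            # for the single coinbase input carrying the witness reserved value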
            gentx['marker'] = 0
            gentx['flag'] = 1
            gentx['witness'] = [[witness_reserved_value_str]]
        
        def get_share(header, last_txout_nonce=last_txout_nonce):
            min_header = dict(header); del min_header['merkle_root']
            share = cls(net, None, dict(
                min_header=min_header,
                share_info=share_info,
                ref_merkle_link=dict(branch=[], index=0),
                last_txout_nonce=last_txout_nonce,
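                # hash_link covers the serialized gentx up to (but not including) the trailing
                # 32-byte ref hash, 8-byte last_txout_nonce, and 4-byte lock_time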
                hash_link=prefix_to_hash_link(bitcoin_data.tx_id_type.pack(gentx)[:-32-8-4], cls.gentx_before_refhash),
                merkle_link=bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0),
            ))
            assert share.header == header # checks merkle_root
            return share
        t5 = time.time()
        if p2pool.BENCH: print "%8.3f ms for data.py:generate_transaction(). Parts: %8.3f %8.3f %8.3f %8.3f %8.3f " % (
            (t5-t0)*1000.,
            (t1-t0)*1000.,
            (t2-t1)*1000.,
            (t3-t2)*1000.,
            (t4-t3)*1000.,
            (t5-t4)*1000.)
        return share_info, gentx, other_transaction_hashes, get_share
Esempio n. 20
0
    merkle_root=
    0x916fb05aed6373d6e5c49626efc0dea5d103038135bffbc86014f5204df2ebe3,
    timestamp=1367995782,
    bits=data.FloatingInteger(0x1e0fffff),
    nonce=58004,
)

DONATION_SCRIPT = '01210241b8aba0994f320a8b438c627dbf31fbdd7dc722dd8418d829d67a9c6e4fd69021036fbd9d0a34a569f10b0431c8aeecf74ad796b99838b7272ef35ded130a794f9b02ae'.decode(
    'hex')
print DONATION_SCRIPT[2:35].encode('hex')
print data.pubkey_to_address(DONATION_SCRIPT[2:35], networks.nets['cachecoin'])
print networks.nets['cachecoin'].POW_FUNC(
    data.block_header_type.pack(block_header))

print data.pubkey_hash_to_script2(
    data.address_to_pubkey_hash('CP8qRQAESbU6CqJvsjh2tCSZWLQnhQd3n3',
                                networks.nets['cachecoin'])).encode('hex')

donate = '4104ffd03de44a6e11b9917f3a29f9443283d9871c9d743ef30d5eddcd37094b64d1b3d8090496b53256786bf5c82932ec23c3b74d9f05a6f95a8b5529352656664bac'.decode(
    'hex')
#print data.script2_to_address(donate, networks.nets['bitcoin'])
#print len("c8c6a3c0957d53698da14d7a2c176a133e92fc53".decode('hex'))
#print donate[1:-1].encode('hex')
#print data.pubkey_to_script2(donate[1:-1]).encode('hex')
#print donate[3:-2].encode('hex')
#print data.pubkey_hash_to_script2(donate[3:-2]).encode('hex')
#print len('0241b8aba0994f320a8b438c627dbf31fbdd7dc722dd8418d829d67a9c6e4fd690'.decode('hex'))
#print data.pubkey_to_script2('0241b8aba0994f320a8b438c627dbf31fbdd7dc722dd8418d829d67a9c6e4fd690'
#036fbd9d0a34a569f10b0431c8aeecf74ad796b99838b7272ef35ded130a794f9b
print data.base58_encode(
    'cac8509b8f959d14253e934585c52d04bf4ae21d28e62414a0c71c9fb80a5713'.decode(
        'hex'))
Esempio n. 21
0
 def generate_transaction(cls, tracker, share_data, block_target, desired_timestamp, desired_target, ref_merkle_link, desired_other_transaction_hashes_and_fees, net, known_txs=None, last_txout_nonce=0, base_subsidy=None):
     previous_share = tracker.items[share_data['previous_share_hash']] if share_data['previous_share_hash'] is not None else None
     
     height, last = tracker.get_height_and_last(share_data['previous_share_hash'])
     assert height >= net.REAL_CHAIN_LENGTH or last is None
     if height < net.TARGET_LOOKBEHIND:
         pre_target3 = net.MAX_TARGET
     else:
         attempts_per_second = get_pool_attempts_per_second(tracker, share_data['previous_share_hash'], net.TARGET_LOOKBEHIND, min_work=True, integer=True)
         pre_target = 2**256//(net.SHARE_PERIOD*attempts_per_second) - 1 if attempts_per_second else 2**256-1
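          # pick a target whose expected attempt count (~2**256 / target) equals one SHARE_PERIOD
          # of pool work, i.e. one share per SHARE_PERIOD on average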
         pre_target2 = math.clip(pre_target, (previous_share.max_target*9//10, previous_share.max_target*11//10))
         pre_target3 = math.clip(pre_target2, (net.MIN_TARGET, net.MAX_TARGET))
     max_bits = bitcoin_data.FloatingInteger.from_target_upper_bound(pre_target3)
     bits = bitcoin_data.FloatingInteger.from_target_upper_bound(math.clip(desired_target, (pre_target3//30, pre_target3)))
     
     new_transaction_hashes = []
     new_transaction_size = 0
     transaction_hash_refs = []
     other_transaction_hashes = []
     
     past_shares = list(tracker.get_chain(share_data['previous_share_hash'], min(height, 100)))
     tx_hash_to_this = {}
     for i, share in enumerate(past_shares):
         for j, tx_hash in enumerate(share.new_transaction_hashes):
             if tx_hash not in tx_hash_to_this:
                 tx_hash_to_this[tx_hash] = [1+i, j] # share_count, tx_count
     for tx_hash, fee in desired_other_transaction_hashes_and_fees:
         if tx_hash in tx_hash_to_this:
             this = tx_hash_to_this[tx_hash]
         else:
             if known_txs is not None:
                 this_size = bitcoin_data.tx_type.packed_size(known_txs[tx_hash])
                 if new_transaction_size + this_size > 50000: # only allow 50 kB of new txns/share
                     break
                 new_transaction_size += this_size
             new_transaction_hashes.append(tx_hash)
             this = [0, len(new_transaction_hashes)-1]
         transaction_hash_refs.extend(this)
         other_transaction_hashes.append(tx_hash)
     
     included_transactions = set(other_transaction_hashes)
     removed_fees = [fee for tx_hash, fee in desired_other_transaction_hashes_and_fees if tx_hash not in included_transactions]
     definite_fees = sum(0 if fee is None else fee for tx_hash, fee in desired_other_transaction_hashes_and_fees if tx_hash in included_transactions)
     if None not in removed_fees:
         share_data = dict(share_data, subsidy=share_data['subsidy'] - sum(removed_fees))
     else:
         assert base_subsidy is not None
         share_data = dict(share_data, subsidy=base_subsidy + definite_fees)
     
     weights, total_weight, donation_weight = tracker.get_cumulative_weights(previous_share.share_data['previous_share_hash'] if previous_share is not None else None,
         min(height, net.REAL_CHAIN_LENGTH-1),
         65535*net.SPREAD*bitcoin_data.target_to_average_attempts(block_target),
     )
     assert total_weight == sum(weights.itervalues()) + donation_weight, (total_weight, sum(weights.itervalues()) + donation_weight)
     
     amounts = dict((script, share_data['subsidy']*(199*weight)//(200*total_weight)) for script, weight in weights.iteritems()) # 99.5% goes according to weights prior to this share
     this_script = bitcoin_data.pubkey_hash_to_script2(share_data['pubkey_hash'])
     amounts[this_script] = amounts.get(this_script, 0) + share_data['subsidy']//200 # 0.5% goes to block finder
     amounts[DONATION_SCRIPT] = amounts.get(DONATION_SCRIPT, 0) + share_data['subsidy'] - sum(amounts.itervalues()) # all that's left over is the donation weight and some extra satoshis due to rounding
     
     if sum(amounts.itervalues()) != share_data['subsidy'] or any(x < 0 for x in amounts.itervalues()):
         raise ValueError()
     
     dests = sorted(amounts.iterkeys(), key=lambda script: (script == DONATION_SCRIPT, amounts[script], script))[-4000:] # block length limit, unlikely to ever be hit
     
     share_info = dict(
         share_data=share_data,
         far_share_hash=None if last is None and height < 99 else tracker.get_nth_parent_hash(share_data['previous_share_hash'], 99),
         max_bits=max_bits,
         bits=bits,
         timestamp=math.clip(desired_timestamp, (
             (previous_share.timestamp + net.SHARE_PERIOD) - (net.SHARE_PERIOD - 1), # = previous_share.timestamp + 1
             (previous_share.timestamp + net.SHARE_PERIOD) + (net.SHARE_PERIOD - 1),
         )) if previous_share is not None else desired_timestamp,
         new_transaction_hashes=new_transaction_hashes,
         transaction_hash_refs=transaction_hash_refs,
         absheight=((previous_share.absheight if previous_share is not None else 0) + 1) % 2**32,
         abswork=((previous_share.abswork if previous_share is not None else 0) + bitcoin_data.target_to_average_attempts(bits.target)) % 2**128,
     )
     
     gentx = dict(
         version=1,
         tx_ins=[dict(
             previous_output=None,
             sequence=None,
             script=share_data['coinbase'],
         )],
         tx_outs=[dict(value=amounts[script], script=script) for script in dests if amounts[script] or script == DONATION_SCRIPT] + [dict(
             value=0,
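          # OP_RETURN (0x6a) followed by a 40-byte push (0x28): the 32-byte ref hash plus the 8-byte nonce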
             script='\x6a\x28' + cls.get_ref_hash(net, share_info, ref_merkle_link) + pack.IntType(64).pack(last_txout_nonce),
         )],
         lock_time=0,
     )
     
     def get_share(header, last_txout_nonce=last_txout_nonce):
         min_header = dict(header); del min_header['merkle_root']
         share = cls(net, None, dict(
             min_header=min_header,
             share_info=share_info,
             ref_merkle_link=dict(branch=[], index=0),
             last_txout_nonce=last_txout_nonce,
             hash_link=prefix_to_hash_link(bitcoin_data.tx_type.pack(gentx)[:-32-8-4], cls.gentx_before_refhash),
             merkle_link=bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0),
         ))
         assert share.header == header # checks merkle_root
         return share
     
     return share_info, gentx, other_transaction_hashes, get_share
Esempio n. 22
0
    def generate_transaction(cls, tracker, share_data, block_target,
                             desired_timestamp, desired_target,
                             ref_merkle_link, other_transaction_hashes, net):
        previous_share = tracker.items[
            share_data['previous_share_hash']] if share_data[
                'previous_share_hash'] is not None else None

        height, last = tracker.get_height_and_last(
            share_data['previous_share_hash'])
        assert height >= net.REAL_CHAIN_LENGTH or last is None
        if height < net.TARGET_LOOKBEHIND:
            pre_target3 = net.MAX_TARGET
        else:
            attempts_per_second = get_pool_attempts_per_second(
                tracker,
                share_data['previous_share_hash'],
                net.TARGET_LOOKBEHIND,
                min_work=True,
                integer=True)
            pre_target = 2**256 // (
                net.SHARE_PERIOD *
                attempts_per_second) - 1 if attempts_per_second else 2**256 - 1
            pre_target2 = math.clip(pre_target,
                                    (previous_share.max_target * 9 // 10,
                                     previous_share.max_target * 11 // 10))
            pre_target3 = math.clip(pre_target2, (0, net.MAX_TARGET))
        max_bits = bitcoin_data.FloatingInteger.from_target_upper_bound(
            pre_target3)
        bits = bitcoin_data.FloatingInteger.from_target_upper_bound(
            math.clip(desired_target, (pre_target3 // 10, pre_target3)))

        weights, total_weight, donation_weight = tracker.get_cumulative_weights(
            share_data['previous_share_hash'],
            min(height, net.REAL_CHAIN_LENGTH),
            65535 * net.SPREAD *
            bitcoin_data.target_to_average_attempts(block_target),
        )
        assert total_weight == sum(weights.itervalues()) + donation_weight, (
            total_weight, sum(weights.itervalues()) + donation_weight)

        amounts = dict(
            (script,
             share_data['subsidy'] * (199 * weight) // (200 * total_weight))
            for script, weight in weights.iteritems(
            ))  # 99.5% goes according to weights prior to this share
        this_script = bitcoin_data.pubkey_hash_to_script2(
            share_data['pubkey_hash'])
        amounts[this_script] = amounts.get(
            this_script,
            0) + share_data['subsidy'] // 200  # 0.5% goes to block finder
        amounts[DONATION_SCRIPT] = amounts.get(
            DONATION_SCRIPT, 0
        ) + share_data['subsidy'] - sum(
            amounts.itervalues()
        )  # all that's left over is the donation weight and some extra satoshis due to rounding

        if sum(amounts.itervalues()) != share_data['subsidy'] or any(
                x < 0 for x in amounts.itervalues()):
            raise ValueError()

        dests = sorted(
            amounts.iterkeys(),
            key=lambda script:
            (script == DONATION_SCRIPT, amounts[script], script))[
                -4000:]  # block length limit, unlikely to ever be hit

        share_info = dict(
            share_data=share_data,
            far_share_hash=None if last is None and height < 99 else
            tracker.get_nth_parent_hash(share_data['previous_share_hash'], 99),
            max_bits=max_bits,
            bits=bits,
            timestamp=math.clip(
                desired_timestamp,
                (
                    (previous_share.timestamp + net.SHARE_PERIOD) -
                    (net.SHARE_PERIOD - 1),  # = previous_share.timestamp + 1
                    (previous_share.timestamp + net.SHARE_PERIOD) +
                    (net.SHARE_PERIOD - 1),
                )) if previous_share is not None else desired_timestamp,
        )

        gentx = dict(
            version=1,
            tx_ins=[
                dict(
                    previous_output=None,
                    sequence=None,
                    script=share_data['coinbase'].ljust(2, '\x00'),
                )
            ],
            tx_outs=[
                dict(value=amounts[script], script=script) for script in dests
                if amounts[script] or script == DONATION_SCRIPT
            ] + [
                dict(
                    value=0,
                    script='\x20' +
                    cls.get_ref_hash(net, share_info, ref_merkle_link),
                )
            ],
            lock_time=0,
        )

        def get_share(header, transactions):
            assert transactions[0] == gentx and [
                bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx))
                for tx in transactions[1:]
            ] == other_transaction_hashes
            min_header = dict(header)
            del min_header['merkle_root']
            hash_link = prefix_to_hash_link(
                bitcoin_data.tx_type.pack(gentx)[:-32 - 4],
                cls.gentx_before_refhash)
            merkle_link = bitcoin_data.calculate_merkle_link(
                [None] + other_transaction_hashes, 0)
            pow_hash = net.PARENT.POW_FUNC(
                bitcoin_data.block_header_type.pack(header))
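            # keep the full transaction list only when the share's PoW also meets the block
            # target, i.e. when it is a candidate block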
            return cls(net,
                       None,
                       dict(
                           min_header=min_header,
                           share_info=share_info,
                           hash_link=hash_link,
                           ref_merkle_link=dict(branch=[], index=0),
                       ),
                       merkle_link=merkle_link,
                       other_txs=transactions[1:]
                       if pow_hash <= header['bits'].target else None)

        return share_info, gentx, other_transaction_hashes, get_share