예제 #1
0
 def tx_hash( cls, tx ):
     """
     Return the txid of a transaction: the double-SHA256 of the raw
     serialized bytes, byte-reversed and rendered as a hex string.
     """
     serialized = bits.tx_serialize( tx )
     raw_bytes = binascii.unhexlify(serialized)
     digest = pybitcoin.bin_double_sha256(raw_bytes)
     # txids are displayed in reversed byte order
     return binascii.hexlify(digest[::-1])
예제 #2
0
 def tx_hash( cls, tx ):
     """
     Compute the transaction ID: double-SHA256 of the raw transaction
     bytes, byte-reversed and hex-encoded.
     """
     serialized_tx = virtualchain.tx_to_hex( tx )
     hashed = pybitcoin.bin_double_sha256(serialized_tx.decode('hex'))
     return hashed[::-1].encode('hex')
예제 #3
0
 def tx_hash(cls, tx):
     """
     Return the txid (reversed double-SHA256, hex-encoded) of a
     transaction.
     """
     raw_tx = binascii.unhexlify(virtualchain.tx_to_hex(tx))
     reversed_digest = pybitcoin.bin_double_sha256(raw_tx)[::-1]
     return binascii.hexlify(reversed_digest)
예제 #4
0
 def tx_hash(cls, tx):
     """
     Calculate the hash (txid) of a serialized transaction.
     """
     hex_tx = bits.tx_serialize(tx)
     digest = pybitcoin.bin_double_sha256(hex_tx.decode('hex'))
     # display order is the reverse of the internal byte order
     tx_hash = digest[::-1].encode('hex')
     return tx_hash
예제 #5
0
def tx_get_hash(tx_serialized):
    """
    Make a transaction hash from a hex-serialized transaction:
    double-SHA256 the raw bytes, reverse them, and hex-encode.
    """
    raw = binascii.unhexlify(tx_serialized)
    digest = pybitcoin.bin_double_sha256(raw)
    return binascii.hexlify(digest[::-1])
예제 #6
0
def block_header_verify( block_data, prev_hash, block_hash ):
    """
    Verify that bitcoind's block header matches the hash we expect.
    The candidate hash is the reversed double-SHA256 of the serialized
    header built from block_data and prev_hash.
    """
    header_hex = block_header_to_hex( block_data, prev_hash )
    digest = pybitcoin.bin_double_sha256( binascii.unhexlify(header_hex) )
    expected_hash = binascii.hexlify( digest[::-1] )
    return expected_hash == block_hash
예제 #7
0
def block_header_verify( block_data, prev_hash, block_hash ):
    """
    Verify whether or not bitcoind's block header matches the hash we
    expect: True iff block_hash equals the reversed double-SHA256 of
    the serialized header.
    """
    raw_header = binascii.unhexlify( block_header_to_hex( block_data, prev_hash ) )
    return block_hash == binascii.hexlify( pybitcoin.bin_double_sha256(raw_header)[::-1] )
예제 #8
0
def tx_verify( tx, tx_hash ):
    """
    Confirm that a bitcoin transaction has the given hash.
    """
    raw_tx = binascii.unhexlify( tx_to_hex( tx ) )
    computed_hash = binascii.hexlify( pybitcoin.bin_double_sha256(raw_tx)[::-1] )
    return computed_hash == tx_hash
def tx_verify(tx, tx_hash):
    """
    Check whether the given txid matches the reversed double-SHA256
    of the serialized transaction.
    """
    serialized = tx_to_hex(tx)
    digest = pybitcoin.bin_double_sha256(binascii.unhexlify(serialized))
    candidate = binascii.hexlify(digest[::-1])
    return tx_hash == candidate
예제 #10
0
    def flush_transactions( self ):
        """
        TESTING ONLY

        Send the bufferred list of transactions as a block.
        Save the resulting transactions to a temporary file.

        Returns the list of txids of the flushed transactions
        (including the fake coinbase).
        """
        
        # next block
        txs = self.next_block_txs
        self.next_block_txs = []

        # add a fake coinbase 
        txs.append( "01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff53038349040d00456c69676975730052d8f72ffabe6d6dd991088decd13e658bbecc0b2b4c87306f637828917838c02a5d95d0e1bdff9b0400000000000000002f73733331312f00906b570400000000e4050000ffffffff01bf208795000000001976a9145399c3093d31e4b0af4be1215d59b857b861ad5d88ac00000000" )

        # index this block's transactions by txid, preserving block order
        block_txs = {}
        block_txids = []
        for tx in txs:
            txid = make_txid( tx )
            block_txids.append( txid )
            block_txs[ txid ] = tx

        # NOTE(review): version, t_hex and difficulty_hex are computed but
        # never used below -- presumably leftovers; confirm before removing.
        version = '01000000'
        t_hex = "%08X" % self.time
        difficulty_hex = "%08X" % self.difficulty
        tx_merkle_tree = pybitcoin.MerkleTree( block_txids )
        tx_merkle_root = tx_merkle_tree.root()
        prev_block_hash = self.block_hashes[ self.end_block - 1 ]        

        # next block
        block = {
            'merkleroot': tx_merkle_root,
            'nonce': 0,                 # mock
            'previousblockhash': prev_block_hash,
            'version': 3,
            'tx': block_txids,
            'chainwork': '00' * 32,     # mock
            'height': self.end_block,
            'difficulty': Decimal(0.0), # mock
            'nextblockhash': None,      # to be filled in
            'confirmations': None,      # to be filled in 
            'time': self.time,          # mock
            'bits': "0x00000000",       # mock
            'size': sum( [len(tx) for tx in txs] ) + 32   # mock
        }

        # serialize the block header: each field is byte-reversed
        # (little-endian on the wire), hence the [::-1] slices
        block_header = bitcoin.main.encode(block['version'], 256, 4)[::-1] + \
                       block['previousblockhash'].decode('hex')[::-1] + \
                       block['merkleroot'].decode('hex')[::-1] + \
                       bitcoin.main.encode(block['time'], 256, 4)[::-1] + \
                       bitcoin.main.encode(int(block['bits'], 16), 256, 4)[::-1] + \
                       bitcoin.main.encode(block['nonce'], 256, 4)[::-1] 

        # block hash is the reversed double-SHA256 of the serialized header
        block['hash'] = pybitcoin.bin_double_sha256( block_header )[::-1].encode('hex')
        block['header'] = block_header

        for txid in block['tx']:
            # update txid --> blockhash map 
            self.txid_to_blockhash[ txid ] = block['hash']

        # update nextblockhash at least 
        self.blocks[prev_block_hash]['nextblockhash'] = block['hash']
        self.block_hashes[ self.end_block ] = block['hash']
        self.blocks[ block['hash'] ] = block
        self.txs.update( block_txs )

        # advance the mock chain state
        self.time += 600    # 10 minutes
        self.difficulty += 1
        self.end_block += 1

        if self.save_file is not None:
            self.save( self.save_file )

        if self.spv_headers_path is not None:
            with open(self.spv_headers_path, "a+") as f:
                f.write( block_header )
                f.write( "00".decode('hex') )    # our SPV client expects varint for tx count to be zero

        return [ make_txid( tx ) for tx in txs ]
예제 #11
0
            return None

        try:
            for resp in resp_json:
                assert 'result' in resp, "Missing result"

                txhex = resp['result']
                assert txhex is not None, "Invalid RPC response '%s' (for %s)" % (
                    simplejson.dumps(resp), txids[resp['id']])

                try:

                    tx_bin = txhex.decode('hex')
                    assert tx_bin is not None

                    tx_hash_bin = pybitcoin.bin_double_sha256(tx_bin)[::-1]
                    assert tx_hash_bin is not None

                    tx_hash = tx_hash_bin.encode('hex')
                    assert tx_hash is not None

                except Exception, e:
                    log.error("Failed to calculate txid of %s" % txhex)
                    raise

                # solicited transaction?
                assert tx_hash in txids, "Unsolicited transaction %s" % tx_hash

                # unique?
                if tx_hash in ret.keys():
                    continue
def testnet_encode( pk_wif ):
    """
    Re-encode a WIF private key under the testnet version byte (0xef):
    base58check-decode, prepend 0xef, and re-append a fresh 4-byte
    double-SHA256 checksum before base58-encoding.
    """
    payload = '\xef' + pybitcoin.b58check_decode(pk_wif)
    checksum = pybitcoin.bin_double_sha256(payload)[0:4]
    return base58.b58encode( payload + checksum )
    def flush_transactions( self ):
        """
        TESTING ONLY

        Send the bufferred list of transactions as a block.
        Save the resulting transactions to a temporary file.

        Returns the list of txids of the flushed transactions
        (including the fake coinbase).
        """
        
        # next block: restore any buffered txs from disk, then consume them
        txs = []
        if self.next_block_txs_path is not None and os.path.exists( self.next_block_txs_path ):
            txs = self.restore_next( self.next_block_txs_path )
            os.unlink( self.next_block_txs_path )

        # add a fake coinbase 
        txs.append( "01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff53038349040d00456c69676975730052d8f72ffabe6d6dd991088decd13e658bbecc0b2b4c87306f637828917838c02a5d95d0e1bdff9b0400000000000000002f73733331312f00906b570400000000e4050000ffffffff01bf208795000000001976a9145399c3093d31e4b0af4be1215d59b857b861ad5d88ac00000000" )

        # index this block's transactions by txid, preserving block order
        block_txs = {}
        block_txids = []
        for tx in txs:
            txid = make_txid( str(tx) )
            block_txids.append( txid )
            block_txs[ txid ] = tx

        # NOTE(review): version, t_hex and difficulty_hex are computed but
        # never used below -- presumably leftovers; confirm before removing.
        version = '01000000'
        t_hex = "%08X" % self.time
        difficulty_hex = "%08X" % self.difficulty
        tx_merkle_tree = pybitcoin.MerkleTree( block_txids )
        tx_merkle_root = tx_merkle_tree.root()
        prev_block_hash = self.block_hashes[ self.end_block - 1 ]        

        # next block
        block = {
            'merkleroot': tx_merkle_root,
            'nonce': 0,                 # mock
            'previousblockhash': prev_block_hash,
            'version': 3,
            'tx': block_txids,
            'chainwork': '00' * 32,     # mock
            'height': self.end_block,
            'difficulty': Decimal(0.0), # mock
            'nextblockhash': None,      # to be filled in
            'confirmations': None,      # to be filled in 
            'time': self.time,          # mock
            'bits': "0x00000000",       # mock
            'size': sum( [len(tx) for tx in txs] ) + 32   # mock
        }

        # serialize the block header: each field is byte-reversed
        # (little-endian on the wire), hence the [::-1] slices
        block_header = bitcoin.main.encode(block['version'], 256, 4)[::-1] + \
                       block['previousblockhash'].decode('hex')[::-1] + \
                       block['merkleroot'].decode('hex')[::-1] + \
                       bitcoin.main.encode(block['time'], 256, 4)[::-1] + \
                       bitcoin.main.encode(int(block['bits'], 16), 256, 4)[::-1] + \
                       bitcoin.main.encode(block['nonce'], 256, 4)[::-1] 

        # block hash is the reversed double-SHA256 of the serialized header
        block['hash'] = pybitcoin.bin_double_sha256( block_header )[::-1].encode('hex')
        block['header'] = block_header

        for txid in block['tx']:
            # update txid --> blockhash map 
            self.txid_to_blockhash[ txid ] = block['hash']

        # update nextblockhash at least 
        self.blocks[prev_block_hash]['nextblockhash'] = block['hash']
        self.block_hashes[ self.end_block ] = block['hash']
        self.blocks[ block['hash'] ] = block
        self.txs.update( block_txs )

        # advance the mock chain state
        self.time += 600    # 10 minutes
        self.difficulty += 1
        self.end_block += 1

        if self.save_file is not None:
            self.save( self.save_file )

        if self.spv_headers_path is not None:
            with open(self.spv_headers_path, "a+") as f:
                f.write( block_header )
                f.write( "00".decode('hex') )    # our SPV client expects varint for tx count to be zero

        return [ make_txid( str(tx) ) for tx in txs ]
def get_nulldata_txs_in_blocks(workpool,
                               bitcoind_opts,
                               blocks_ids,
                               first_block_hash=None):
    """
   Obtain the set of transactions over a range of blocks that have an OP_RETURN with nulldata.
   Each returned transaction record will contain:
   * vin (list of inputs from bitcoind)
   * vout (list of outputs from bitcoind)
   * txid (transaction ID, as a hex string)
   * txindex (transaction index in the block)
   * senders (a list of {"script_pubkey":, "amount":, and "addresses":} dicts; the "script_pubkey" field is the hex-encoded op script).
   * fee (total amount sent)
   * nulldata (input data to the transaction's script; encodes virtual chain operations)
   
   Farm out the requisite RPCs to a workpool of processes, each 
   of which have their own bitcoind RPC client.
   
   Returns [(block_number, [txs])], where each tx contains the above.
   """

    nulldata_tx_map = {}  # {block_number: {"tx": [tx]}}
    block_bandwidth = {
    }  # {block_number: {"time": time taken to process, "size": number of bytes}}
    nulldata_txs = []

    # break work up into slices of blocks, so we don't run out of memory
    slice_len = multiprocess_batch_size(bitcoind_opts)
    slice_count = 0
    last_block_hash = first_block_hash

    while slice_count * slice_len < len(blocks_ids):

        # per-slice state, reset on every pass
        block_hashes = {}  # map block ID to block hash
        block_datas = {}  # map block hashes to block data
        block_hash_futures = []
        block_data_futures = []
        tx_futures = []
        nulldata_tx_futures = []
        all_nulldata_tx_futures = []
        block_times = {}  # {block_number: time taken to process}

        block_slice = blocks_ids[(slice_count *
                                  slice_len):min((slice_count + 1) *
                                                 slice_len, len(blocks_ids))]
        if len(block_slice) == 0:
            log.debug("Zero-length block slice")
            break

        start_slice_time = time.time()

        # get all block hashes
        for block_number in block_slice:

            block_times[block_number] = time.time()

            block_hash_fut = getblockhash_async(workpool, bitcoind_opts,
                                                block_number)
            block_hash_futures.append((block_number, block_hash_fut))

        # coalesce all block hashes
        block_hash_time_start = time.time()
        block_hash_time_end = 0

        for i in xrange(0, len(block_hash_futures)):

            block_number, block_hash_fut = future_next(block_hash_futures,
                                                       lambda f: f[1])

            # NOTE: interruptable blocking get(), but should not block since future_next found one that's ready
            block_hash = future_get_result(block_hash_fut, 10000000000000000L)
            block_hashes[block_number] = block_hash

            # start getting each block's data
            if block_hash is not None:
                block_data_fut = getblock_async(workpool, bitcoind_opts,
                                                block_hash)
                block_data_futures.append((block_number, block_data_fut))

            else:
                raise Exception("BUG: Block %s: no block hash" % block_number)

        block_data_time_start = time.time()
        block_data_time_end = 0

        # coalesce block data
        for i in xrange(0, len(block_data_futures)):

            block_number, block_data_fut = future_next(block_data_futures,
                                                       lambda f: f[1])
            block_hash_time_end = time.time()

            # NOTE: interruptable blocking get(), but should not block since future_next found one that's ready
            block_data = future_get_result(block_data_fut, 1000000000000000L)

            if 'tx' not in block_data:
                raise Exception("BUG: No tx data in block %s" % block_number)

            block_datas[block_hashes[block_number]] = block_data

        # verify blockchain headers: each block's header must hash to its
        # reported block hash, chained to the previous block's hash
        for i in xrange(0, len(block_slice)):
            block_id = block_slice[i]
            block_hash = block_hashes[block_id]

            prev_block_hash = None
            if i > 0:
                prev_block_id = block_slice[i - 1]
                prev_block_hash = block_hashes[prev_block_id]

            elif last_block_hash is not None:
                prev_block_hash = last_block_hash

            else:
                # first block of the first slice with no prior hash: cannot verify
                continue

            if not block_header_verify(block_datas[block_hash],
                                       prev_block_hash, block_hash):
                # recompute the candidate hash just for the error message
                serialized_header = block_header_to_hex(
                    block_datas[block_hash], prev_block_hash)
                candidate_hash_reversed = pybitcoin.bin_double_sha256(
                    binascii.unhexlify(serialized_header))
                candidate_hash = binascii.hexlify(
                    candidate_hash_reversed[::-1])
                raise Exception(
                    "Hash mismatch on block %s: got invalid block hash (expected %s, got %s)"
                    % (block_id, block_hash, candidate_hash))

        # remember the last hash so the next slice can chain-verify its first block
        last_block_hash = block_hashes[block_slice[-1]]

        for block_number in block_slice:

            block_hash = block_hashes[block_number]
            block_data = block_datas[block_hash]

            # verify block data txs
            rc = block_verify(block_data)
            if not rc:
                raise Exception(
                    "Hash mismatch on block %s: got invalid Merkle root (expected %s)"
                    % (block_hash, block_data['merkleroot']))

            # go get each transaction
            tx_hashes = block_data['tx']

            log.debug("Get %s transactions from block %s" %
                      (len(tx_hashes), block_hash))

            # can get transactions asynchronously with a workpool (but preserve tx order!)
            if len(tx_hashes) > 0:

                for j in xrange(0, len(tx_hashes)):

                    tx_hash = tx_hashes[j]
                    tx_fut = getrawtransaction_async(workpool, bitcoind_opts,
                                                     tx_hash, 1)
                    tx_futures.append((block_number, j, tx_fut))

            else:

                raise Exception("BUG: Zero-transaction block %s" %
                                block_number)

        block_tx_time_start = time.time()
        block_tx_time_end = 0

        # coalesce raw transaction queries...
        for i in xrange(0, len(tx_futures)):

            block_number, tx_index, tx_fut = future_next(
                tx_futures, lambda f: f[2])
            block_data_time_end = time.time()

            # NOTE: interruptable blocking get(), but should not block since future_next found one that's ready
            tx = future_get_result(tx_fut, 1000000000000000L)

            #if len(tx['vin']) > 0 and 'coinbase' not in tx['vin'][0].keys():
            if not tx_is_coinbase(tx):

                # verify non-coinbase transaction
                tx_hash = tx['txid']
                if not tx_verify(tx, tx_hash):
                    raise Exception(
                        "Transaction hash mismatch in %s (index %s) in block %s"
                        % (tx['txid'], tx_index, block_number))

            if tx and has_nulldata(tx):

                # go get input transactions for this transaction (since it's the one with nulldata, i.e., a virtual chain operation),
                # but tag each future with the hash of the current tx, so we can reassemble the in-flight inputs back into it.
                nulldata_tx_futs_and_output_idxs = process_nulldata_tx_async(
                    workpool, bitcoind_opts, tx)
                nulldata_tx_futures.append((block_number, tx_index, tx,
                                            nulldata_tx_futs_and_output_idxs))

            else:

                # maybe done with this block
                # NOTE will be called multiple times; we expect the last write to be the total time taken by this block
                total_time = time.time() - block_times[block_number]
                block_bandwidth[block_number] = bandwidth_record(
                    total_time, None)

        block_nulldata_tx_time_start = time.time()
        block_nulldata_tx_time_end = 0

        # coalesce queries on the inputs to each nulldata transaction from this block...
        for (block_number, tx_index, tx,
             nulldata_tx_futs_and_output_idxs) in nulldata_tx_futures:

            # skip malformed RPC responses
            if ('vin' not in tx) or ('vout' not in tx) or ('txid' not in tx):
                continue

            outputs = tx['vout']

            total_in = 0  # total input paid
            senders = []
            ordered_senders = []

            # gather this tx's nulldata-bearing transactions
            for i in xrange(0, len(nulldata_tx_futs_and_output_idxs)):

                input_idx, input_tx_fut, tx_output_index = future_next(
                    nulldata_tx_futs_and_output_idxs, lambda f: f[1])

                # NOTE: interruptable blocking get(), but should not block since future_next found one that's ready
                input_tx = future_get_result(input_tx_fut, 1000000000000000L)
                input_tx_hash = input_tx['txid']

                # verify (but skip coinbase)
                if not tx_is_coinbase(input_tx):
                    try:
                        if not tx_verify(input_tx, input_tx_hash):
                            raise Exception(
                                "Input transaction hash mismatch %s from tx %s (index %s)"
                                % (input_tx['txid'], tx['txid'],
                                   tx_output_index))
                    except:
                        # dump the offending input tx before re-raising, for diagnosis
                        pp = pprint.PrettyPrinter()
                        pp.pprint(input_tx)
                        raise

                sender, amount_in = get_sender_and_amount_in_from_txn(
                    input_tx, tx_output_index)

                if sender is None or amount_in is None:
                    continue

                total_in += amount_in

                # preserve sender order...
                ordered_senders.append((input_idx, sender))

            # sort on input_idx, so the list of senders matches the given transaction's list of inputs
            ordered_senders.sort()
            senders = [sender for (_, sender) in ordered_senders]

            total_out = get_total_out(outputs)
            nulldata = get_nulldata(tx)

            # extend tx to explicitly record its nulldata (i.e. the virtual chain op),
            # the list of senders (i.e. their script hexs),
            # and the total amount paid
            tx['nulldata'] = nulldata
            tx['senders'] = senders
            tx['fee'] = total_in - total_out

            # track the order of nulldata-containing transactions in this block
            if not nulldata_tx_map.has_key(block_number):
                nulldata_tx_map[block_number] = [(tx_index, tx)]

            else:
                nulldata_tx_map[block_number].append((tx_index, tx))

            # maybe done with this block
            # NOTE will be called multiple times; we expect the last write to be the total time taken by this block
            total_time = time.time() - block_times[block_number]
            block_bandwidth[block_number] = bandwidth_record(total_time, None)

        # record bandwidth information
        for block_number in block_slice:

            block_data = None

            if nulldata_tx_map.has_key(block_number):

                tx_list = nulldata_tx_map[block_number]  # [(tx_index, tx)]
                tx_list.sort(
                )  # sorts on tx_index--preserves order in the block

                txs = [tx for (_, tx) in tx_list]
                block_data = txs

            if not block_bandwidth.has_key(block_number):

                # done with this block now
                total_time = time.time() - block_times[block_number]
                block_bandwidth[block_number] = bandwidth_record(
                    total_time, block_data)

        block_tx_time_end = time.time()
        block_nulldata_tx_time_end = time.time()

        end_slice_time = time.time()

        # NOTE(review): bandwidth_record() is sometimes given None for its
        # data argument above -- confirm it records a numeric "size" in that
        # case, otherwise this sum would fail on None.
        total_processing_time = sum(
            map(lambda block_id: block_bandwidth[block_id]["time"],
                block_bandwidth.keys()))
        total_data = sum(
            map(lambda block_id: block_bandwidth[block_id]["size"],
                block_bandwidth.keys()))

        block_hash_time = block_hash_time_end - block_hash_time_start
        block_data_time = block_data_time_end - block_data_time_start
        block_tx_time = block_tx_time_end - block_tx_time_start
        block_nulldata_tx_time = block_nulldata_tx_time_end - block_nulldata_tx_time_start

        # log some stats...
        log.debug("blocks %s-%s (%s):" %
                  (block_slice[0], block_slice[-1], len(block_slice)))
        log.debug("  Time total:     %s" % total_processing_time)
        log.debug("  Data total:     %s" % total_data)
        log.debug("  Total goodput:  %s" % (total_data /
                                            (total_processing_time + 1e-7)))
        log.debug("  block hash time:        %s" % block_hash_time)
        log.debug("  block data time:        %s" % block_data_time)
        log.debug("  block tx time:          %s" % block_tx_time)
        log.debug("  block nulldata tx time: %s" % block_nulldata_tx_time)

        # next slice
        slice_count += 1

    # get the blockchain-ordered list of nulldata-containing transactions.
    # this is the blockchain-agreed list of all virtual chain operations, as well as the amount paid per transaction and the
    # principal(s) who created each transaction.
    # convert {block_number: [tx]} to [(block_number, [tx])] where [tx] is ordered by the order in which the transactions occurred in the block
    for block_number in blocks_ids:

        txs = []

        if block_number in nulldata_tx_map.keys():
            tx_list = nulldata_tx_map[block_number]  # [(tx_index, tx)]
            tx_list.sort()  # sorts on tx_index--preserves order in the block

            # preserve index
            for (tx_index, tx) in tx_list:
                tx['txindex'] = tx_index

            txs = [tx for (_, tx) in tx_list]

        nulldata_txs.append((block_number, txs))

    return nulldata_txs
def parse_nameop( opcode, payload, fake_pubkey, recipient=None, recipient_address=None, import_update_hash=None ):
    """
    Build a mock transaction around an OP_RETURN payload and attempt to
    parse it as a name operation via op_extract().

    NOTE(review): as written, a successful parse falls off the end of the
    function and returns None implicitly -- it looks like a trailing
    'return op' is missing; confirm against the original source.
    """

    opcode_name = OPCODE_NAMES[opcode]
    pubk = pybitcoin.BitcoinPublicKey(fake_pubkey)
    address = pubk.address()
    script_pubkey = pybitcoin.make_pay_to_address_script( address )
    # fake sender record: pay-to-address script for the fake public key
    senders = [{
        "script_pubkey": script_pubkey,
        "script_type": "pubkeyhash",
        "addresses": [ address ]
    }]

    # just enough to get the public key
    inputs = [{
        "scriptSig": {
            "asm": "ignored %s" % fake_pubkey,
        }
    }]

    script = "OP_RETURN %s" % payload

    try:
        scripthex = pybitcoin.make_op_return_script( payload )
    except:
        # empty payload: fall back to a bare OP_RETURN script ("6a")
        if len(payload) == 0:
            scripthex = "6a"
        else:
            raise

    outputs = [{
        "scriptPubKey": {
            "asm": script,
            "hex": scripthex,
            "addresses": []
        }}]

    if recipient_address is not None:
        # NOTE(review): the asm places the double-SHA256 of the pubkey where
        # a hash160 would normally appear -- presumably acceptable for this
        # mock; confirm.
        script = "OP_DUP OP_HASH160 %s OP_EQUALVERIFY OP_CHECKSIG" % binascii.hexlify( pybitcoin.bin_double_sha256( fake_pubkey ) )
        scripthex = pybitcoin.make_pay_to_address_script( recipient_address )
        outputs.append( {
            "scriptPubKey": {
                "asm": script,
                "hex": scripthex,
                "addresses": [ recipient_address ]
            }
        })

    if import_update_hash is not None:
        # extra p2pkh output addressed by the import update hash
        script = "OP_DUP OP_HASH160 %s OP_EQUALVERIFY OP_CHECKSIG" % import_update_hash
        scripthex = pybitcoin.make_pay_to_address_script( pybitcoin.hex_hash160_to_address( import_update_hash ) )
        outputs.append( {
            "scriptPubKey": {
                "asm": script,
                "hex": scripthex,
                "addresses": [ pybitcoin.hex_hash160_to_address(import_update_hash) ]
            }
        })
   
    try:
        op = op_extract( opcode_name, payload, senders, inputs, outputs, 373601, 0, "00" * 64 )  
    except AssertionError, ae:
        # failed to parse
        return None
예제 #16
0
	def __init__(self, data):
		"""
		Store the display-order digest of *data*: double-SHA256,
		then byte-reversed via reverse_hash().
		"""
		self.dgst = reverse_hash(bin_double_sha256(data))
예제 #17
0
def parse_nameop(opcode,
                 payload,
                 fake_pubkey,
                 recipient=None,
                 recipient_address=None,
                 import_update_hash=None):
    """
    Build a mock transaction around an OP_RETURN payload and attempt to
    parse it as a name operation via op_extract().

    NOTE(review): as written, a successful parse falls off the end of the
    function and returns None implicitly -- it looks like a trailing
    'return op' is missing; confirm against the original source.
    """

    opcode_name = OPCODE_NAMES[opcode]
    pubk = pybitcoin.BitcoinPublicKey(fake_pubkey)
    address = pubk.address()
    script_pubkey = pybitcoin.make_pay_to_address_script(address)
    # fake sender record: pay-to-address script for the fake public key
    senders = [{
        "script_pubkey": script_pubkey,
        "script_type": "pubkeyhash",
        "addresses": [address]
    }]

    # just enough to get the public key
    inputs = [{
        "scriptSig": {
            "asm": "ignored %s" % fake_pubkey,
        }
    }]

    script = "OP_RETURN %s" % payload

    try:
        scripthex = pybitcoin.make_op_return_script(payload)
    except:
        # empty payload: fall back to a bare OP_RETURN script ("6a")
        if len(payload) == 0:
            scripthex = "6a"
        else:
            raise

    outputs = [{
        "scriptPubKey": {
            "asm": script,
            "hex": scripthex,
            "addresses": []
        }
    }]

    if recipient_address is not None:
        # NOTE(review): the asm places the double-SHA256 of the pubkey where
        # a hash160 would normally appear -- presumably acceptable for this
        # mock; confirm.
        script = "OP_DUP OP_HASH160 %s OP_EQUALVERIFY OP_CHECKSIG" % binascii.hexlify(
            pybitcoin.bin_double_sha256(fake_pubkey))
        scripthex = pybitcoin.make_pay_to_address_script(recipient_address)
        outputs.append({
            "scriptPubKey": {
                "asm": script,
                "hex": scripthex,
                "addresses": [recipient_address]
            }
        })

    if import_update_hash is not None:
        # extra p2pkh output addressed by the import update hash
        script = "OP_DUP OP_HASH160 %s OP_EQUALVERIFY OP_CHECKSIG" % import_update_hash
        scripthex = pybitcoin.make_pay_to_address_script(
            pybitcoin.hex_hash160_to_address(import_update_hash))
        outputs.append({
            "scriptPubKey": {
                "asm": script,
                "hex": scripthex,
                "addresses":
                [pybitcoin.hex_hash160_to_address(import_update_hash)]
            }
        })

    try:
        op = op_extract(opcode_name, payload, senders, inputs, outputs, 373601,
                        0, "00" * 64)
    except AssertionError, ae:
        # failed to parse
        return None
예제 #18
0
def get_nulldata_txs_in_blocks( workpool, bitcoind_opts, blocks_ids, first_block_hash=None ):
   """
   Obtain the set of transactions over a range of blocks that have an OP_RETURN with nulldata.
   Each returned transaction record will contain:
   * vin (list of inputs from bitcoind)
   * vout (list of outputs from bitcoind)
   * txid (transaction ID, as a hex string)
   * txindex (transaction index in the block)
   * senders (a list of {"script_pubkey":, "amount":, and "addresses":} dicts; the "script_pubkey" field is the hex-encoded op script).
   * fee (total amount sent)
   * nulldata (input data to the transaction's script; encodes virtual chain operations)

   @workpool: pool of worker processes, each with its own bitcoind RPC client
   @bitcoind_opts: connection options passed through to each async RPC helper
   @blocks_ids: list of block heights to scan (assumed contiguous and ascending,
                since each block's header is verified against its predecessor's hash)
   @first_block_hash: hash of the block immediately before blocks_ids[0], used to
                      verify the first block's header; if None, the first block of
                      the first slice is not header-verified.

   Farm out the requisite RPCs to a workpool of processes, each 
   of which have their own bitcoind RPC client.
   
   Returns [(block_number, [txs])], where each tx contains the above.
   Raises Exception on any block-header, Merkle-root, or txid verification failure.
   """
   
   nulldata_tx_map = {}    # {block_number: {"tx": [tx]}}
   block_bandwidth = {}    # {block_number: {"time": time taken to process, "size": number of bytes}}
   nulldata_txs = []
   
   # break work up into slices of blocks, so we don't run out of memory 
   slice_len = multiprocess_batch_size( bitcoind_opts )
   slice_count = 0
   last_block_hash = first_block_hash
   
   while slice_count * slice_len < len(blocks_ids):
      
      block_hashes = {}  # map block ID to block hash 
      block_datas = {}    # map block hashes to block data
      block_hash_futures = []
      block_data_futures = []
      tx_futures = []
      nulldata_tx_futures = []
      all_nulldata_tx_futures = []
      block_times = {}          # {block_number: time taken to process}
      
      # current slice of block heights (clamped at the end of blocks_ids)
      block_slice = blocks_ids[ (slice_count * slice_len) : min((slice_count+1) * slice_len, len(blocks_ids)) ]
      if len(block_slice) == 0:
         log.debug("Zero-length block slice")
         break
      
      start_slice_time = time.time()
     
      # get all block hashes 
      for block_number in block_slice:
         
         # record start time; the elapsed time is finalized later (possibly multiple times)
         block_times[block_number] = time.time() 
         
         block_hash_fut = getblockhash_async( workpool, bitcoind_opts, block_number )
         block_hash_futures.append( (block_number, block_hash_fut) ) 
   
      # coalesce all block hashes
      block_hash_time_start = time.time()
      block_hash_time_end = 0
      
      for i in xrange(0, len(block_hash_futures)):
         
         # future_next blocks until *some* future in the list is ready, and removes it
         block_number, block_hash_fut = future_next( block_hash_futures, lambda f: f[1] )
         
         # NOTE: interruptable blocking get(), but should not block since future_next found one that's ready
         # (the huge timeout is effectively "wait forever, but stay interruptable")
         block_hash = future_get_result( block_hash_fut, 10000000000000000L )
         block_hashes[block_number] = block_hash
       
         # start getting each block's data
         if block_hash is not None:
             block_data_fut = getblock_async( workpool, bitcoind_opts, block_hash )
             block_data_futures.append( (block_number, block_data_fut) )

         else:
             raise Exception("BUG: Block %s: no block hash" % block_number)
     
      block_data_time_start = time.time()
      block_data_time_end = 0
     
      # coalesce block data
      for i in xrange(0, len(block_data_futures)):
         
         block_number, block_data_fut = future_next( block_data_futures, lambda f: f[1] )
         # NOTE: hash-fetch phase is considered over once block data starts arriving,
         # so the end timestamp is (repeatedly) taken here; the last write wins
         block_hash_time_end = time.time()
         
         # NOTE: interruptable blocking get(), but should not block since future_next found one that's ready
         block_data = future_get_result( block_data_fut, 1000000000000000L )
         
         if 'tx' not in block_data:
             raise Exception("BUG: No tx data in block %s" % block_number)
         
         block_datas[ block_hashes[block_number] ] = block_data
     

      # verify blockchain headers: each block's header must hash to the block hash
      # bitcoind reported, given the previous block's hash
      for i in xrange(0, len(block_slice)):
          block_id = block_slice[i]
          block_hash = block_hashes[block_id]

          prev_block_hash = None
          if i > 0:
              prev_block_id = block_slice[i-1]
              prev_block_hash = block_hashes[prev_block_id]

          elif last_block_hash is not None:
              # first block of this slice: chain it to the last block of the previous slice
              prev_block_hash = last_block_hash 

          else:
              # very first block and no first_block_hash given: cannot header-verify it
              continue

          if not block_header_verify( block_datas[block_hash], prev_block_hash, block_hash ):
              # recompute the candidate hash just to include it in the error message
              serialized_header = block_header_to_hex( block_datas[block_hash], prev_block_hash )
              candidate_hash_reversed = pybitcoin.bin_double_sha256(binascii.unhexlify(serialized_header))
              candidate_hash = binascii.hexlify(candidate_hash_reversed[::-1])
              raise Exception("Hash mismatch on block %s: got invalid block hash (expected %s, got %s)" % (block_id, block_hash, candidate_hash))

      # remember the tail hash so the next slice's first block can be verified
      last_block_hash = block_hashes[ block_slice[-1] ]

      for block_number in block_slice:
         
         block_hash = block_hashes[block_number]
         block_data = block_datas[block_hash]
         
         # verify block data txs (Merkle root check over the block's txids)
         rc = block_verify( block_data )
         if not rc:
             raise Exception("Hash mismatch on block %s: got invalid Merkle root (expected %s)" % (block_hash, block_data['merkleroot']))

         # go get each transaction
         tx_hashes = block_data['tx']
         
         log.debug("Get %s transactions from block %s" % (len(tx_hashes), block_hash))
         
         # can get transactions asynchronously with a workpool (but preserve tx order!)
         if len(tx_hashes) > 0:
           
            for j in xrange(0, len(tx_hashes)):
               
               tx_hash = tx_hashes[j]
               # verbose=1: ask bitcoind for the decoded JSON form of the tx
               tx_fut = getrawtransaction_async( workpool, bitcoind_opts, tx_hash, 1 )
               # tag with (block, index-in-block) so ordering can be restored later
               tx_futures.append( (block_number, j, tx_fut) )
            
         else:
            
            # every block has at least a coinbase transaction
            raise Exception("BUG: Zero-transaction block %s" % block_number)
           
      block_tx_time_start = time.time()
      block_tx_time_end = 0
      
      # coalesce raw transaction queries...
      for i in xrange(0, len(tx_futures)):
         
         block_number, tx_index, tx_fut = future_next( tx_futures, lambda f: f[2] )
         # NOTE: block-data phase is considered over once txs start arriving; last write wins
         block_data_time_end = time.time()
         
         # NOTE: interruptable blocking get(), but should not block since future_next found one that's ready
         tx = future_get_result( tx_fut, 1000000000000000L )
         
         #if len(tx['vin']) > 0 and 'coinbase' not in tx['vin'][0].keys():
         if not tx_is_coinbase( tx ):

             # verify non-coinbase transaction 
             tx_hash = tx['txid']
             if not tx_verify( tx, tx_hash ):
                 raise Exception("Transaction hash mismatch in %s (index %s) in block %s" % (tx['txid'], tx_index, block_number))

         if tx and has_nulldata(tx):
            
            # go get input transactions for this transaction (since it's the one with nulldata, i.e., a virtual chain operation),
            # but tag each future with the hash of the current tx, so we can reassemble the in-flight inputs back into it. 
            nulldata_tx_futs_and_output_idxs = process_nulldata_tx_async( workpool, bitcoind_opts, tx )
            nulldata_tx_futures.append( (block_number, tx_index, tx, nulldata_tx_futs_and_output_idxs) )
            
         else:
            
            # maybe done with this block
            # NOTE will be called multiple times; we expect the last write to be the total time taken by this block
            total_time = time.time() - block_times[ block_number ]
            block_bandwidth[ block_number ] = bandwidth_record( total_time, None )
             
      block_nulldata_tx_time_start = time.time()
      block_nulldata_tx_time_end = 0
      
      # coalesce queries on the inputs to each nulldata transaction from this block...
      for (block_number, tx_index, tx, nulldata_tx_futs_and_output_idxs) in nulldata_tx_futures:
         
         # skip malformed RPC responses
         if ('vin' not in tx) or ('vout' not in tx) or ('txid' not in tx):
            continue 
         
         outputs = tx['vout']
         
         total_in = 0   # total input paid
         senders = []
         ordered_senders = []
         
         # gather this tx's nulldata-bearing transactions
         for i in xrange(0, len(nulldata_tx_futs_and_output_idxs)):
            
            input_idx, input_tx_fut, tx_output_index = future_next( nulldata_tx_futs_and_output_idxs, lambda f: f[1] )
            
            # NOTE: interruptable blocking get(), but should not block since future_next found one that's ready
            input_tx = future_get_result( input_tx_fut, 1000000000000000L )
            input_tx_hash = input_tx['txid']

            # verify (but skip coinbase) 
            if not tx_is_coinbase( input_tx ):
                try:
                    if not tx_verify( input_tx, input_tx_hash ):
                        raise Exception("Input transaction hash mismatch %s from tx %s (index %s)" % (input_tx['txid'], tx['txid'], tx_output_index))
                except:
                    # dump the offending tx for debugging before propagating
                    pp = pprint.PrettyPrinter()
                    pp.pprint(input_tx)
                    raise

            sender, amount_in = get_sender_and_amount_in_from_txn( input_tx, tx_output_index )
            
            # tolerate unparseable input scripts: skip rather than fail
            if sender is None or amount_in is None:
               continue
            
            total_in += amount_in 
            
            # preserve sender order...
            ordered_senders.append( (input_idx, sender) )
         
         # sort on input_idx, so the list of senders matches the given transaction's list of inputs
         ordered_senders.sort()
         senders = [sender for (_, sender) in ordered_senders]
         
         total_out = get_total_out( outputs )
         nulldata = get_nulldata( tx )
      
         # extend tx to explicitly record its nulldata (i.e. the virtual chain op),
         # the list of senders (i.e. their script hexs),
         # and the total amount paid
         tx['nulldata'] = nulldata
         tx['senders'] = senders
         tx['fee'] = total_in - total_out
         
         # track the order of nulldata-containing transactions in this block
         if not nulldata_tx_map.has_key( block_number ):
            nulldata_tx_map[ block_number ] = [(tx_index, tx)]
            
         else:
            nulldata_tx_map[ block_number ].append( (tx_index, tx) )
            
         # maybe done with this block
         # NOTE will be called multiple times; we expect the last write to be the total time taken by this block
         total_time = time.time() - block_times[ block_number ]
         block_bandwidth[ block_number ] = bandwidth_record( total_time, None )
            
      # record bandwidth information 
      for block_number in block_slice:
         
         block_data = None
         
         if nulldata_tx_map.has_key( block_number ):
            
            tx_list = nulldata_tx_map[ block_number ]     # [(tx_index, tx)]
            tx_list.sort()                                # sorts on tx_index--preserves order in the block
            
            txs = [ tx for (_, tx) in tx_list ]
            block_data = txs 
            
         if not block_bandwidth.has_key( block_number ):
            
            # done with this block now 
            total_time = time.time() - block_times[ block_number ]
            block_bandwidth[ block_number ] = bandwidth_record( total_time, block_data )
         
         
      block_tx_time_end = time.time()
      block_nulldata_tx_time_end = time.time()
   
      end_slice_time = time.time()
      
      # aggregate per-block stats collected above (bandwidth_record presumably
      # yields {"time":, "size":} dicts -- TODO confirm against its definition)
      total_processing_time = sum( map( lambda block_id: block_bandwidth[block_id]["time"], block_bandwidth.keys() ) )
      total_data = sum( map( lambda block_id: block_bandwidth[block_id]["size"], block_bandwidth.keys() ) )
      
      block_hash_time = block_hash_time_end - block_hash_time_start 
      block_data_time = block_data_time_end - block_data_time_start
      block_tx_time = block_tx_time_end - block_tx_time_start 
      block_nulldata_tx_time = block_nulldata_tx_time_end - block_nulldata_tx_time_start
      
      # log some stats...
      log.debug("blocks %s-%s (%s):" % (block_slice[0], block_slice[-1], len(block_slice)) )
      log.debug("  Time total:     %s" % total_processing_time )
      log.debug("  Data total:     %s" % total_data )
      log.debug("  Total goodput:  %s" % (total_data / (total_processing_time + 1e-7)))
      log.debug("  block hash time:        %s" % block_hash_time)
      log.debug("  block data time:        %s" % block_data_time)
      log.debug("  block tx time:          %s" % block_tx_time)
      log.debug("  block nulldata tx time: %s" % block_nulldata_tx_time)
      
      # next slice
      slice_count += 1
   
   # get the blockchain-ordered list of nulldata-containing transactions.
   # this is the blockchain-agreed list of all virtual chain operations, as well as the amount paid per transaction and the 
   # principal(s) who created each transaction.
   # convert {block_number: [tx]} to [(block_number, [tx])] where [tx] is ordered by the order in which the transactions occurred in the block
   for block_number in blocks_ids:
      
      txs = []
      
      if block_number in nulldata_tx_map.keys():
         tx_list = nulldata_tx_map[ block_number ]     # [(tx_index, tx)]
         tx_list.sort()                                # sorts on tx_index--preserves order in the block
         
         # preserve index
         for (tx_index, tx) in tx_list:
             tx['txindex'] = tx_index

         txs = [ tx for (_, tx) in tx_list ]

      # every requested block gets an entry, even if it has no nulldata txs
      nulldata_txs.append( (block_number, txs) )
      
   return nulldata_txs
예제 #19
0
def testnet_encode( pk_wif ):
    """
    Re-encode a WIF-encoded private key under the testnet
    version byte (0xEF), returning the testnet WIF string.
    """
    # strip the base58check envelope, then rebuild it with the testnet prefix
    payload = '\xef' + pybitcoin.b58check_decode( pk_wif )
    checksum = pybitcoin.bin_double_sha256( payload )[0:4]
    return base58.b58encode( payload + checksum )
예제 #20
0
            log.error("Failed to parse transactions")
            return None

        try:
            for resp in resp_json:
                assert 'result' in resp, "Missing result"

                txhex = resp['result']
                assert txhex is not None, "Invalid RPC response '%s' (for %s)" % (simplejson.dumps(resp), txids[resp['id']])

                try:

                    tx_bin = txhex.decode('hex')
                    assert tx_bin is not None

                    tx_hash_bin = pybitcoin.bin_double_sha256(tx_bin)[::-1]
                    assert tx_hash_bin is not None

                    tx_hash = tx_hash_bin.encode('hex')
                    assert tx_hash is not None

                except Exception, e:
                    log.error("Failed to calculate txid of %s" % txhex)
                    raise

                # solicited transaction?
                assert tx_hash in txids, "Unsolicited transaction %s" % tx_hash
                
                # unique?
                if tx_hash in ret.keys():
                    continue