def test_block(block_details):
    '''Parse a reference block and verify its header hashes and txids.'''
    coin, info = block_details
    raw = unhexlify(info['block'])
    header, txs = coin.block_full(raw, info['height'])
    # Header must hash to the known block hash and link to its parent
    assert coin.header_hash(header) == hex_str_to_hash(info['hash'])
    assert coin.header_prevhash(header) == hex_str_to_hash(
        info['previousblockhash'])
    # Every parsed transaction must carry the expected txid, in order
    for n, (_tx, txid) in enumerate(txs):
        assert txid == hex_str_to_hash(info['tx'][n])
async def make_raw_header(self, b):
    '''Assemble a raw block header from a deserialised block dict *b*.

    A missing previousblockhash (the genesis block) is treated as all
    zeroes.  Fields are packed little-endian per the usual header layout.
    '''
    prev_hash = b.get('previousblockhash')
    if prev_hash is None:
        prev_hash = '0' * 64
    parts = [
        pack('<L', b.get('version')),
        hex_str_to_hash(prev_hash),
        hex_str_to_hash(b.get('merkleroot')),
        pack('<L', self.timestamp_safe(b['time'])),
        pack('<L', int(b.get('bits'), 16)),
        pack('<L', int(b.get('nonce'))),
    ]
    return b''.join(parts)
def test_block(block_details):
    '''Deserialise a reference block and check header hashes and txids.'''
    coin, info = block_details
    parsed = coin.block(unhexlify(info['block']), info['height'])
    # The header must hash to the expected block hash and point at the
    # expected parent block.
    assert coin.header_hash(parsed.header) == hex_str_to_hash(info['hash'])
    assert coin.header_prevhash(parsed.header) == hex_str_to_hash(
        info['previousblockhash'])
    # Each (tx, txid) pair must match the reference txid list in order
    for n, (_tx, txid) in enumerate(parsed.transactions):
        assert txid == hex_str_to_hash(info['tx'][n])
async def tx_merkle(self, tx_hash, height):
    '''Return the merkle branch proving tx_hash (a hex string) is in the
    block at the given height.

    Raises RPCError(BAD_REQUEST) if the tx is not in that block.
    '''
    hex_hashes = await self.daemon_request('block_hex_hashes', height, 1)
    block = await self.daemon_request('deserialised_block', hex_hashes[0])
    tx_hashes = block['tx']
    if tx_hash not in tx_hashes:
        raise RPCError(
            BAD_REQUEST, f'tx hash {tx_hash} not in '
            f'block {hex_hashes[0]} at height {height:,d}')
    pos = tx_hashes.index(tx_hash)

    # Walk up the tree one level at a time, collecting the sibling hash
    # at each level.
    level = [hex_str_to_hash(txh) for txh in tx_hashes]
    merkle_branch = []
    idx = pos
    while len(level) > 1:
        # Odd-length levels duplicate their last entry, Bitcoin-style
        if len(level) & 1:
            level.append(level[-1])
        sibling = idx - 1 if idx & 1 else idx + 1
        merkle_branch.append(hash_to_str(level[sibling]))
        idx //= 2
        level = [double_sha256(level[n] + level[n + 1])
                 for n in range(0, len(level), 2)]

    return {"block_height": height, "merkle": merkle_branch, "pos": pos}
def scripthash_to_hashX(self, scripthash):
    '''Convert a 64-char hex script hash to its hashX prefix.

    Raises RPCError(BAD_REQUEST) if scripthash is not valid hex
    describing exactly 32 bytes.
    '''
    try:
        raw = hex_str_to_hash(scripthash)
    except Exception:
        pass
    else:
        if len(raw) == 32:
            return raw[:HASHX_LEN]
    raise RPCError(BAD_REQUEST, f'{scripthash} is not a valid script hash')
def scripthash_to_hashX(self, scripthash):
    '''Convert a 64-char hex script hash to its hashX prefix.

    Raises RPCError if scripthash is not valid hex describing exactly
    32 bytes.
    '''
    try:
        bin_hash = hex_str_to_hash(scripthash)
        if len(bin_hash) == 32:
            return bin_hash[:self.coin.HASHX_LEN]
    except Exception:
        pass
    # f-string for consistency with the rest of the file; message text
    # is unchanged.
    raise RPCError(f'{scripthash} is not a valid script hash')
def get_utxos(self, hashX):
    '''Return an unordered list of UTXO named tuples from mempool
    transactions that pay to hashX.

    This does not consider if any other mempool transactions spend
    the outputs.
    '''
    result = []
    # self.hashXs is a defaultdict; use get() so a pure query does not
    # insert an empty entry.
    for hex_hash in self.hashXs.get(hashX, []):
        entry = self.txs.get(hex_hash)
        if not entry:
            continue
        # entry[1] is the tx's (hashX, value) output pairs
        for pos, (out_hashX, value) in enumerate(entry[1]):
            if out_hashX == hashX:
                result.append(
                    UTXO(-1, pos, hex_str_to_hash(hex_hash), 0, value))
    return result
def process_raw_txs(self, raw_tx_map, pending):
    '''Process the dictionary of raw transactions and return a dictionary
    of updates to apply to self.txs.

    This runs in the executor so should not update any member variables
    it doesn't own.  Atomic reads of self.txs that do not depend on the
    result remaining the same are fine.
    '''
    script_hashX = self.coin.hashX_from_script
    deserializer = self.coin.DESERIALIZER
    db_utxo_lookup = self.db.db_utxo_lookup
    txs = self.txs

    # Deserialize each tx and put it in a pending list
    for tx_hash, raw_tx in raw_tx_map.items():
        # Skip txs that have since left the mempool
        if tx_hash not in txs:
            continue
        tx, tx_size = deserializer(raw_tx).read_tx_and_vsize()

        # Convert the tx outputs into (hashX, value) pairs
        txout_pairs = [(script_hashX(txout.pk_script), txout.value)
                       for txout in tx.outputs]

        # Convert the tx inputs to (prev_hex_hash, prev_idx) pairs
        txin_pairs = [(hash_to_str(txin.prev_hash), txin.prev_idx)
                      for txin in tx.inputs]

        pending.append((tx_hash, txin_pairs, txout_pairs, tx_size))

    # Now process what we can
    result = {}
    deferred = []

    for item in pending:
        if self.stop:
            break

        tx_hash, old_txin_pairs, txout_pairs, tx_size = item
        if tx_hash not in txs:
            continue

        mempool_missing = False
        txin_pairs = []

        try:
            for prev_hex_hash, prev_idx in old_txin_pairs:
                # Sentinel default 0 distinguishes "not in mempool"
                # (fall through to the DB lookup below) from a mempool
                # entry still pending this round (value None).
                tx_info = txs.get(prev_hex_hash, 0)
                if tx_info is None:
                    # Unprocessed mempool tx; it may have been handled
                    # earlier this round, in which case it is in result.
                    tx_info = result.get(prev_hex_hash)
                    if not tx_info:
                        # Dependency not yet available; defer this item
                        mempool_missing = True
                        continue
                if tx_info:
                    # Previous output known in-memory: tx_info[1] holds
                    # its (hashX, value) output pairs.
                    txin_pairs.append(tx_info[1][prev_idx])
                elif not mempool_missing:
                    # Confirmed prevout: fetch it from the UTXO DB
                    prev_hash = hex_str_to_hash(prev_hex_hash)
                    txin_pairs.append(db_utxo_lookup(prev_hash, prev_idx))
        except (self.db.MissingUTXOError, self.db.DBError):
            # DBError can happen when flushing a newly processed
            # block.  MissingUTXOError typically happens just
            # after the daemon has accepted a new block and the
            # new mempool has deps on new txs in that block.
            continue

        if mempool_missing:
            deferred.append(item)
        else:
            # Compute fee
            tx_fee = (sum(v for hashX, v in txin_pairs) -
                      sum(v for hashX, v in txout_pairs))
            result[tx_hash] = (txin_pairs, txout_pairs, tx_fee, tx_size)

    return result, deferred
def test_hex_str_to_hash():
    # '7274735f6f745f68736168' decodes to ASCII 'rts_ot_hsah'; reversing
    # the byte order yields b'hash_to_str'.
    expected = b'hash_to_str'
    assert lib_hash.hex_str_to_hash('7274735f6f745f68736168') == expected
def process_raw_txs(self, raw_tx_map, pending):
    '''Process the dictionary of raw transactions and return a dictionary
    of updates to apply to self.txs.

    This runs in the executor so should not update any member variables
    it doesn't own.  Atomic reads of self.txs that do not depend on the
    result remaining the same are fine.
    '''
    script_hashX = self.coin.hashX_from_script
    deserializer = self.coin.DESERIALIZER
    db_utxo_lookup = self.db.db_utxo_lookup
    txs = self.txs

    # Deserialize each tx and put it in our priority queue
    for tx_hash, raw_tx in raw_tx_map.items():
        if tx_hash not in txs:
            continue
        tx, _tx_hash = deserializer(raw_tx).read_tx()

        # Convert the tx outputs into (hashX, value) pairs.
        # script_hashX maps each output script to a hashX; in effect
        # P2PK outputs are normalised into the same keyspace as P2PKH
        # to save memory.
        txout_pairs = [(script_hashX(txout.pk_script), txout.value)
                       for txout in tx.outputs]

        # Convert the tx inputs to (prev_hex_hash, prev_idx) pairs:
        # each input identifies the spent UTXO by tx hash and index.
        txin_pairs = [(hash_to_str(txin.prev_hash), txin.prev_idx)
                      for txin in tx.inputs]

        pending.append((tx_hash, txin_pairs, txout_pairs))

    # Now process what we can
    result = {}
    deferred = []

    for item in pending:
        if self.stop:
            break

        tx_hash, old_txin_pairs, txout_pairs = item
        if tx_hash not in txs:
            continue

        mempool_missing = False
        txin_pairs = []

        try:
            # They are "old" txin_pairs because they still hold
            # (tx_hash, tx_idx); below they are converted to the same
            # (hashX, value) form as the output pairs.
            for prev_hex_hash, prev_idx in old_txin_pairs:
                # txs.get has three outcomes:
                # 1. prev_hex_hash is already confirmed, so it is not in
                #    txs; the default 0 is returned.
                # 2. prev_hex_hash is a tx newly received this round; it
                #    was earlier set to None, so None is returned.
                # 3. prev_hex_hash was received in a previous round but
                #    is still unconfirmed; its data tx_info is returned.
                tx_info = txs.get(prev_hex_hash, 0)
                if tx_info is None:
                    # Case 2: although newly received, it may have been
                    # processed earlier this round, in which case it is
                    # already in result.
                    tx_info = result.get(prev_hex_hash)
                    if not tx_info:
                        # Case 2, and not among this round's completed
                        # txs either: put the item on the deferred list.
                        mempool_missing = True
                        continue
                if tx_info:
                    # Data is in memory: take the prev_idx'th output of
                    # that tx and append it to the current txin_pairs.
                    txin_pairs.append(tx_info[1][prev_idx])
                elif not mempool_missing:
                    # Case 1: the prevout is confirmed, so it should be
                    # found in the database.
                    prev_hash = hex_str_to_hash(prev_hex_hash)
                    # Note: the txin fetched here has been converted
                    # from (tx_hash, tx_idx) to (hashX, value), i.e. the
                    # UTXO's address key and the amount paid.
                    txin_pairs.append(db_utxo_lookup(prev_hash, prev_idx))
        except (self.db.MissingUTXOError, self.db.DBError):
            # DBError can happen when flushing a newly processed
            # block.  MissingUTXOError typically happens just
            # after the daemon has accepted a new block and the
            # new mempool has deps on new txs in that block.
            continue

        if mempool_missing:
            # Defer until its mempool dependencies are processed
            deferred.append(item)
        else:
            # Store in result so later txs this round can look it up
            result[tx_hash] = (txin_pairs, txout_pairs)

    return result, deferred