def get_history(self, addr, cache_only=False):
    """Return the transaction history of `addr` as a list of
    {'tx_hash': ..., 'height': ...} dicts (mempool entries get height 0).

    A cached result is returned when available.  With cache_only=True a
    cache miss returns -1 instead of touching the database.  An address
    that is known to the db but has an empty history yields ['*'] so
    clients can distinguish "seen, no transactions" from "never seen".
    """
    with self.cache_lock:
        hist = self.history_cache.get(addr)
    if hist is not None:
        return hist
    if cache_only:
        return -1

    with self.dblock:
        try:
            hash_160 = bc_address_to_hash_160(addr)
            hist = self.deserialize(self.db.Get(hash_160))
            is_known = True
        except Exception:
            # narrowed from a bare `except:` — we only want to treat a
            # db miss (leveldb raises KeyError) / bad address as "unknown",
            # not swallow KeyboardInterrupt or SystemExit
            hist = []
            is_known = False

    # should not be necessary: entries are expected to be stored ordered
    hist.sort(key=lambda tup: tup[1])
    # TODO: check uniqueness too...

    # add memory pool (unconfirmed transactions, height 0)
    with self.mempool_lock:
        for txid in self.mempool_hist.get(addr, []):
            hist.append((txid, 0, 0))

    # list comprehension instead of map(): identical on Python 2, and on
    # Python 3 keeps `hist` a real list so the `hist == []` test below
    # and the cached value still behave correctly
    hist = [{'tx_hash': x[0], 'height': x[2]} for x in hist]

    # add something to distinguish between unused and empty addresses
    if hist == [] and is_known:
        hist = ['*']

    with self.cache_lock:
        self.history_cache[addr] = hist
    return hist
def import_block(self, block, block_hash, block_height, sync, revert=False):
    """Apply (or, with revert=True, undo) one block to the address-history db.

    Builds an in-memory batch of history updates for every address touched
    by the block's transactions, then commits it to LevelDB in a single
    WriteBatch.  On a normal import, undo information (which address/height
    each spent input pointed at) is recorded so the block can later be
    reverted during a reorg.  Statement order matters throughout: inputs
    must be resolved before histories are mutated, and on revert the
    transactions are replayed in reverse order.

    NOTE(review): `.decode('hex')` and the bare `except:` clauses are
    Python-2 idioms; this method is kept byte-identical here.
    """

    self.batch_list = {}  # address -> serialized history being built
    self.batch_txio = {}  # transaction i/o key -> address (outpoint index)

    block_inputs = []
    block_outputs = []
    addr_to_read = []

    # deserialize transactions
    t0 = time.time()
    tx_hashes, txdict = self.deserialize_block(block)

    t00 = time.time()

    if not revert:
        # read addresses of tx inputs: each input is keyed by
        # prevout_hash + 4-byte little-endian prevout index
        for tx in txdict.values():
            for x in tx.get('inputs'):
                txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                block_inputs.append(txi)

        # sorted lookups give LevelDB sequential key access
        block_inputs.sort()
        for txi in block_inputs:
            try:
                addr = self.db.Get(txi)
            except:
                # the input could come from the same block
                continue
            self.batch_txio[txi] = addr
            addr_to_read.append(addr)

    else:
        # reverting: collect this block's own outputs so they can be
        # deleted from the db below
        for txid, tx in txdict.items():
            for x in tx.get('outputs'):
                txo = (txid + int_to_hex(x.get('index'), 4)).decode('hex')
                block_outputs.append(txo)

    # read histories of addresses receiving outputs in this block
    for txid, tx in txdict.items():
        for x in tx.get('outputs'):
            hash_160 = bc_address_to_hash_160(x.get('address'))
            addr_to_read.append(hash_160)

    addr_to_read.sort()
    for addr in addr_to_read:
        try:
            self.batch_list[addr] = self.db.Get(addr)
        except:
            # address not yet in the db: start from an empty history
            self.batch_list[addr] = ''

    if revert:
        # previously stored undo data tells us where each spent input
        # must be re-attached
        undo_info = self.get_undo_info(block_height)
        # print "undo", block_height, undo_info
    else:
        undo_info = {}

    # process
    t1 = time.time()
    if revert:
        # undo transactions in reverse order of application
        tx_hashes = tx_hashes[::-1]
    for txid in tx_hashes:  # must be ordered
        tx = txdict[txid]
        if not revert:

            # record, per input, the (height, address) it removed so the
            # block can be reverted later
            undo = []
            for x in tx.get('inputs'):
                prevout_height, prevout_addr = self.remove_from_history(None, x.get('prevout_hash'), x.get('prevout_n'))
                undo.append((prevout_height, prevout_addr))
            undo_info[txid] = undo

            for x in tx.get('outputs'):
                hash_160 = bc_address_to_hash_160(x.get('address'))
                self.add_to_history(hash_160, txid, x.get('index'), block_height)

        else:
            # revert: strip this tx's outputs from the receiving histories
            for x in tx.get('outputs'):
                hash_160 = bc_address_to_hash_160(x.get('address'))
                self.remove_from_history(hash_160, txid, x.get('index'))

            # ...and restore each spent input from the saved undo data;
            # `i` indexes the undo list in the same order the inputs
            # were recorded at import time
            i = 0
            for x in tx.get('inputs'):
                prevout_height, prevout_addr = undo_info.get(txid)[i]
                i += 1

                # read the history into batch list
                if self.batch_list.get(prevout_addr) is None:
                    self.batch_list[prevout_addr] = self.db.Get(prevout_addr)

                # re-add them to the history
                self.add_to_history(prevout_addr, x.get('prevout_hash'), x.get('prevout_n'), prevout_height)
                # print_log( "new hist for", hash_160_to_bc_address(prevout_addr), self.deserialize(self.batch_list[prevout_addr]) )

    # write
    max_len = 0
    max_addr = ''
    t2 = time.time()

    # commit everything atomically in one LevelDB WriteBatch;
    # max_len/max_addr only feed the slow-block diagnostic below
    batch = leveldb.WriteBatch()
    for addr, serialized_hist in self.batch_list.items():
        batch.Put(addr, serialized_hist)
        l = len(serialized_hist)
        if l > max_len:
            max_len = l
            max_addr = addr

    if not revert:
        # add new created outputs
        for txio, addr in self.batch_txio.items():
            batch.Put(txio, addr)
        # delete spent inputs
        for txi in block_inputs:
            batch.Delete(txi)
        # add undo info
        self.write_undo_info(batch, block_height, undo_info)
    else:
        # restore spent inputs
        for txio, addr in self.batch_txio.items():
            batch.Put(txio, addr)
        # delete spent outputs
        for txo in block_outputs:
            batch.Delete(txo)

    # add the max — record the new chain tip under the 'height' key
    batch.Put('height', self.serialize([(block_hash, block_height, 0)]))

    # actual write
    self.db.Write(batch, sync=sync)
    t3 = time.time()
    # only log timing when a non-sync import took unusually long (>10s)
    if t3 - t0 > 10 and not sync:
        print_log("block", block_height,
                  "parse:%0.2f " % (t00 - t0),
                  "read:%0.2f " % (t1 - t00),
                  "proc:%.2f " % (t2 - t1),
                  "write:%.2f " % (t3 - t2),
                  "max:", max_len, hash_160_to_bc_address(max_addr))

    # drop cached histories for every address this block touched
    for h160 in self.batch_list.keys():
        addr = hash_160_to_bc_address(h160)
        self.invalidate_cache(addr)