    def memorypool_update(self):
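        """Synchronize our view of bitcoind's memory pool.

        Fetch the current mempool, map each new transaction to the
        addresses it touches, drop entries for transactions that have
        left the mempool, rebuild the per-address unconfirmed history,
        and invalidate the cache of every address whose history changed.
        """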

        mempool_hashes = self.bitcoind('getrawmempool')

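        # index transactions that have appeared in the mempool since the last poll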
        for tx_hash in mempool_hashes:
            if tx_hash in self.mempool_hashes: continue

            tx = self.get_mempool_transaction(tx_hash)
            if not tx: continue

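            # inputs: resolve each spent outpoint to its address via the index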
            for x in tx.get('inputs'):
                txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                try:
                    h160 = self.db.Get(txi)
                    addr = hash_160_to_bc_address(h160)
                except:
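                    # outpoint not in the index (e.g. it spends another unconfirmed tx); skip it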
                    continue
                l = self.mempool_addresses.get(tx_hash, [])
                if addr not in l: 
                    l.append( addr )
                    self.mempool_addresses[tx_hash] = l

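            # outputs: the destination addresses are available directly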
            for x in tx.get('outputs'):
                addr = x.get('address')
                l = self.mempool_addresses.get(tx_hash, [])
                if addr not in l: 
                    l.append( addr )
                    self.mempool_addresses[tx_hash] = l

            self.mempool_hashes.append(tx_hash)

        # reset to the current snapshot, dropping hashes that have left the mempool
        self.mempool_hashes = mempool_hashes

        # remove mempool_addresses entries for transactions no longer in the mempool
        for tx_hash, addresses in self.mempool_addresses.items():
            if tx_hash not in self.mempool_hashes:
                self.mempool_addresses.pop(tx_hash)

        # rebuild histories
        new_mempool_hist = {}
        for tx_hash, addresses in self.mempool_addresses.items():
            for addr in addresses:
                h = new_mempool_hist.get(addr, [])
                if tx_hash not in h: 
                    h.append( tx_hash )
                new_mempool_hist[addr] = h

        for addr in new_mempool_hist:
            if addr in self.mempool_hist:
                if self.mempool_hist[addr] != new_mempool_hist[addr]:
                    self.invalidate_cache(addr)
            else:
                self.invalidate_cache(addr)

        with self.mempool_lock:
            self.mempool_hist = new_mempool_hist

    def import_block(self, block, block_hash, block_height, sync, revert=False):
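        """Apply a block to the address index, or undo it when revert=True.

        Updates the history of every affected address, the outpoint -> address
        map and the block's undo information, then commits everything to
        leveldb in a single write batch.
        """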

        self.batch_list = {}  # address -> history
        self.batch_txio = {}  # transaction i/o -> address

        block_inputs = []
        block_outputs = []
        addr_to_read = []

        # deserialize transactions
        t0 = time.time()
        tx_hashes, txdict = self.deserialize_block(block)

        t00 = time.time()


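        # a normal import needs the addresses funding this block's inputs;
        # a revert needs the outpoints created by this block, so they can be deleted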
        if not revert:
            # read addresses of tx inputs
            for tx in txdict.values():
                for x in tx.get('inputs'):
                    txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                    block_inputs.append(txi)

            block_inputs.sort()
            for txi in block_inputs:
                try:
                    addr = self.db.Get(txi)
                except:
                    # the input could come from the same block
                    continue
                self.batch_txio[txi] = addr
                addr_to_read.append(addr)

        else:
            for txid, tx in txdict.items():
                for x in tx.get('outputs'):
                    txo = (txid + int_to_hex(x.get('index'), 4)).decode('hex')
                    block_outputs.append(txo)
            
        # read histories of addresses
        for txid, tx in txdict.items():
            for x in tx.get('outputs'):
                hash_160 = bc_address_to_hash_160(x.get('address'))
                addr_to_read.append(hash_160)

        addr_to_read.sort()
        for addr in addr_to_read:
            try:
                self.batch_list[addr] = self.db.Get(addr)
            except: 
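                # address not in the db yet: start from an empty history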
                self.batch_list[addr] = ''


        if revert:
            undo_info = self.get_undo_info(block_height)
            # print "undo", block_height, undo_info
        else:
            undo_info = {}

        # process
        t1 = time.time()

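        # when reverting, walk the block's transactions in reverse order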
        if revert: tx_hashes = tx_hashes[::-1]
        for txid in tx_hashes: # must be ordered
            tx = txdict[txid]
            if not revert:

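                # spend the inputs, remembering each prevout's (height, address) so the block can be undone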
                undo = []
                for x in tx.get('inputs'):
                    prevout_height, prevout_addr = self.remove_from_history( None, x.get('prevout_hash'), x.get('prevout_n'))
                    undo.append( (prevout_height, prevout_addr) )
                undo_info[txid] = undo

                for x in tx.get('outputs'):
                    hash_160 = bc_address_to_hash_160(x.get('address'))
                    self.add_to_history( hash_160, txid, x.get('index'), block_height)
                    
            else:
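                # revert: remove this block's outputs from the histories, then re-add the spent prevouts from the undo info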
                for x in tx.get('outputs'):
                    hash_160 = bc_address_to_hash_160(x.get('address'))
                    self.remove_from_history( hash_160, txid, x.get('index'))

                for i, x in enumerate(tx.get('inputs')):
                    prevout_height, prevout_addr = undo_info.get(txid)[i]

                    # read the history into batch list
                    if self.batch_list.get(prevout_addr) is None:
                        self.batch_list[prevout_addr] = self.db.Get(prevout_addr)

                    # re-add them to the history
                    self.add_to_history( prevout_addr, x.get('prevout_hash'), x.get('prevout_n'), prevout_height)
                    # print_log( "new hist for", hash_160_to_bc_address(prevout_addr), self.deserialize(self.batch_list[prevout_addr]) )

        # write
        max_len = 0
        max_addr = ''
        t2 = time.time()

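        # stage the modified address histories; track the largest one for the log line below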
        batch = leveldb.WriteBatch()
        for addr, serialized_hist in self.batch_list.items():
            batch.Put(addr, serialized_hist)
            l = len(serialized_hist)
            if l > max_len:
                max_len = l
                max_addr = addr

        if not revert:
            # add new created outputs
            for txio, addr in self.batch_txio.items():
                batch.Put(txio, addr)
            # delete the outpoints spent by this block
            for txi in block_inputs:
                batch.Delete(txi)
            # add undo info 
            self.write_undo_info(batch, block_height, undo_info)
        else:
            # restore the outpoints that this block had spent
            for txio, addr in self.batch_txio.items():
                batch.Put(txio, addr)
            # delete the outpoints created by this block
            for txo in block_outputs:
                batch.Delete(txo)


        # record the new chain tip (block hash and height)
        batch.Put('height', self.serialize( [(block_hash, block_height, 0)] ) )

        # actual write
        self.db.Write(batch, sync = sync)

        t3 = time.time()
        if t3 - t0 > 10 and not sync: 
            print_log("block", block_height, 
                      "parse:%0.2f "%(t00 - t0), 
                      "read:%0.2f "%(t1 - t00), 
                      "proc:%.2f "%(t2-t1), 
                      "write:%.2f "%(t3-t2), 
                      "max:", max_len, hash_160_to_bc_address(max_addr))

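        # these histories changed; drop any cached status for their addresses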
        for h160 in self.batch_list.keys(): 
            addr = hash_160_to_bc_address(h160)
            self.invalidate_cache(addr)