Example #1
    def __init__(self, dsn, address_state_cache_size, logger):
        setproctitle('btcapi server: addresses state module')
        policy = asyncio.get_event_loop_policy()
        policy.set_event_loop(policy.new_event_loop())
        self.dsn = dsn

        self.address_cache = LRU(address_state_cache_size)
        self.affected_existed = MRU()
        self.affected_new = MRU()
        self.missed_addresses = set()

        self.log = logger
        self.threads = 20
        self.cache_limit = 15000000
        self.active = True
        self.db_pool = None
        self.synchronization_task = None
        self.bootstrap_completed = False
        self.start_time = time.time()
        self.last_block = None
        self.batch_last_block = 0

        self.loaded_addresses = 0
        self.requested_addresses = 0

        self.loop = asyncio.get_event_loop()
        signal.signal(signal.SIGTERM, self.terminate)
        self.loop.create_task(self.start())
        self.loop.run_forever()
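
The constructor above never returns: it installs a fresh event loop, registers a SIGTERM handler, schedules start() and then blocks in run_forever(). A minimal sketch (not from the project) of how such a loop-owning constructor is typically driven from a parent process; AddressState is the class this __init__ belongs to (shown in full in Example #5 below), and the DSN and cache size are placeholders:

import logging
import multiprocessing

# AddressState as defined in Example #5 is assumed to be importable here.

def run_address_state(dsn, cache_size):
    # Constructing the object blocks this child process in its own event
    # loop until SIGTERM triggers terminate() and loop.stop().
    AddressState(dsn, cache_size, logging.getLogger("address-state"))

if __name__ == "__main__":
    p = multiprocessing.Process(target=run_address_state,
                                args=("postgresql://localhost/btcapi", 1000000))
    p.start()
    p.join()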
Example #2
    def __init__(self, db_type, db, log):
        self.load_buffer = deque()
        self.loaded_utxo = LRU(100000)  # missed utxo records loaded from db
        self.loaded_ustxo = LRU(100000)  # missed ustxo records loaded from db

        self.load_data_future = asyncio.Future()
        self.load_data_future.set_result(True)

        self.log = log
        self.db_type = db_type
        self.db = db
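
load_data_future is created already resolved, a common asyncio idiom for a "nothing is loading right now" gate: readers await it, and a loader swaps in a fresh Future for the duration of a database read. A minimal sketch of that pattern under this assumption (Loader and fetch are placeholder names, not project code):

import asyncio


class Loader:
    """Sketch of the pre-resolved Future gate (placeholder, not project code)."""

    def __init__(self):
        # Pre-resolved future: awaiting it returns immediately,
        # meaning "no load in progress".
        self.load_data_future = asyncio.Future()
        self.load_data_future.set_result(True)

    async def load(self, fetch):
        # Wait for any in-flight load, then install a fresh gate.
        await self.load_data_future
        self.load_data_future = asyncio.Future()
        try:
            return await fetch()
        finally:
            # Re-open the gate whether the fetch succeeded or failed.
            self.load_data_future.set_result(True)


async def main():
    loader = Loader()

    async def fetch():
        await asyncio.sleep(0.01)  # stand-in for a db read
        return {"outpoint": b"\x00" * 36}

    print(await loader.load(fetch))


asyncio.run(main())

Awaiting the previous future before replacing it serializes loads without an explicit asyncio.Lock.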
Example #3
File: utxo.py Project: bitaps-com/pybtc
    def __init__(self, db, rpc, loop, log, cache_size):
        self.cache = MRU()  # utxo cache
        self.restore_blocks_cache = LRU()  # blocks cache used to restore the utxo cache

        self.missed = set()  # missed utxo
        self.missed_failed = deque()
        self.loaded = dict()  # missed records loaded from db

        self.utxo_records = deque()  # prepared utxo records for write to db
        self.p2pkMapHash = deque()  # prepared p2pk map records for write to db
        self.pending_p2pkMapHash = deque()  # p2pk map records pending write to db
        self.pending_saved = dict()  # temp hash table used while records are being written

        self.scheduled_to_delete = deque()
        self.pending_deleted = deque()

        self.checkpoint = 0
        self.checkpoints = deque()
        self.log = log

        self.size_limit = cache_size
        self.db = db
        self.loop = loop

        self.save_process = False
        self.write_to_db = False

        self.rpc = rpc

        # stats
        self._requests = 0
        self._failed_requests = 0
        self._hit = 0
        self.saved_utxo_count = 0
        self.last_block = 0
        self.deleted_utxo_count = 0
        self.read_from_db_time = 0
        self.read_from_db_batch_time = 0
        self.read_from_db_count = 0
        self.read_from_db_time_total = 0
        self.loaded_utxo_count = 0
Example #4
class UTXO():

    def __init__(self, db_type, db, rpc, loop, log, cache_size):
        self.cache = MRU()  # utxo cache
        self.restore_blocks_cache = LRU()  # blocks cache for restore utxo cache

        self.missed = set()  # missed utxo
        self.missed_failed = deque()
        self.loaded = dict()  # missed records loaded from db

        self.utxo_records = deque()  # prepared utxo records for write to db
        self.p2pkMapHash = deque()  # prepared p2pk map records for write to db
        self.pending_saved = dict()  # temp hash table used while records are being written

        self.scheduled_to_delete = deque()
        self.pending_deleted = deque()


        self.checkpoint = 0
        self.checkpoints = deque()
        self.log = log


        self.size_limit = cache_size
        self.db_type = db_type
        self.db = db
        self.loop = loop

        self.save_process = False
        self.write_to_db = False

        self.rpc = rpc

        # stats
        self._requests = 0
        self._failed_requests = 0
        self._hit = 0
        self.saved_utxo_count = 0
        self.last_block = 0
        self.deleted_utxo_count = 0
        self.read_from_db_time = 0
        self.read_from_db_batch_time = 0
        self.read_from_db_count = 0
        self.read_from_db_time_total = 0
        self.loaded_utxo_count = 0


    def get(self, key):
        #
        # Get and remove an unspent coin from the cache.
        # If the coin is in the pending-saved list, schedule it for deletion from the db.
        # If the coin does not exist, add it to the missed coin list.
        #
        self._requests += 1
        i = None

        try:
            i = self.cache.delete(key)
        except:
            try:
                i = self.pending_saved[key]
                self.scheduled_to_delete.append(key)
            except:
                pass

        if i is None:
            self._failed_requests += 1
            self.missed.add(key)
        else:
            self._hit += 1
        return i


    def set(self, outpoint, pointer, amount, address):
        self.cache[outpoint] = (pointer, amount, address)


    async def load_utxo(self):
        #
        # load missed utxo from db
        #
        try:
            t = time.time()
            self.missed_failed = list()
            failed = False
            if self.db_type == "postgresql":
                async with self.db.acquire() as conn:
                    rows = await conn.fetch("SELECT outpoint, "
                                            "       pointer,"
                                            "       address,"
                                            "       amount "
                                            "FROM connector_utxo "
                                            "WHERE outpoint = ANY($1);", self.missed)
                for row in rows:
                    self.loaded[row["outpoint"]] = (row["pointer"],
                                                    row["amount"],
                                                    row["address"])
                    self.loaded_utxo_count += 1

                if len(self.missed) > len(rows):
                    failed = True
                    for row in rows:
                        self.missed.remove(row["outpoint"])


            elif self.db_type == "rocksdb":
                rows = self.db.multi_get(list(self.missed))
                failed = True if len(self.missed) > len(rows) else False
                for outpoint in rows:
                    d = rows[outpoint]
                    if failed:
                        self.missed.remove(outpoint)
                    pointer = c_int_to_int(d)
                    f = c_int_len(pointer)
                    amount = c_int_to_int(d[f:])
                    f += c_int_len(amount)
                    address = d[f:]
                    self.loaded[outpoint] = (pointer, amount, address)
                    self.loaded_utxo_count += 1

            else:
                for outpoint in self.missed:
                    d = self.db.get(outpoint)
                    if d is None:
                        self.missed_failed.append(outpoint)
                        continue
                    pointer = c_int_to_int(d)
                    f = c_int_len(pointer)
                    amount = c_int_to_int(d[f:])
                    f += c_int_len(amount)
                    address = d[f:]
                    self.loaded[outpoint] = (pointer, amount, address)
                    self.loaded_utxo_count += 1


            self.read_from_db_count += len(self.missed)
            self.read_from_db_time += time.time() - t
            self.read_from_db_batch_time += time.time() - t
            self.read_from_db_time_total += time.time() - t
            if failed:
                self.missed_failed = list(self.missed)
            self.missed = set()
        except:
            raise


    async def load_utxo_from_daemon(self):
        #
        #  load missed utxo from bitcoind daemon
        #
        if not self.missed_failed: return
        missed = chunks_by_count(self.missed_failed, 50)
        for m in missed:
            result = await self.rpc.batch([["getrawtransaction", rh2s(i[:32]), 1] for i in m])
            hash_list = set()
            for r in result:
                if r["result"]["blockhash"] not in self.restore_blocks_cache:
                    hash_list.add(r["result"]["blockhash"])

            result2 = await self.rpc.batch([["getblock", r] for r in hash_list])
            for r in result2:
                self.restore_blocks_cache[r["result"]["hash"]] = r["result"]

            for key, r in zip(m, result):
                out_index = bytes_to_int(key[32:])
                tx = r["result"]
                amount = int(tx["vout"][out_index]["value"] * 100000000)
                script = parse_script(tx["vout"][out_index]["scriptPubKey"]["hex"])
                try:
                    address = b"".join((bytes([script["nType"]]), script["addressHash"]))
                except:
                    address = b"".join((bytes([script["nType"]]), script["script"]))
                block = self.restore_blocks_cache[tx["blockhash"]]

                tx_index = block["tx"].index(tx["txid"])
                block_height = block["height"]
                pointer = (block_height << 39) + (tx_index << 20) + (1 << 19) + out_index
                self.loaded[key] = (pointer, amount, address)
        self.missed_failed = list()
        while len(self.restore_blocks_cache) > 1000:
            self.restore_blocks_cache.pop()


    def get_loaded(self, key):
        try:
            i = self.loaded.pop(key)
            self.scheduled_to_delete.append(key)
            return i
        except:
            return None


    def create_checkpoint(self, last_block, app_last_block = None):
        # check checkpoints state
        self.last_block = last_block
        if not self.checkpoints: return
        checkpoints = set()
        for i in self.checkpoints:
            if i > self.checkpoint: checkpoints.add(i)
        self.checkpoints = sorted(checkpoints)
        # save the tail of the cache to db
        if self.save_process or not self.cache: return
        if app_last_block is not None:
            if app_last_block < self.checkpoints[0]: return

        self.save_process = True
        limit = 0
        try:
            checkpoint = self.checkpoints.pop(0)
            lb = 0
            while self.cache:
                key, value = self.cache.peek_last_item()
                if value[0] >> 39 != lb:
                    # block changed

                    if checkpoint <= lb:
                        # last block was checkpoint block
                        if len(self.utxo_records) > self.size_limit * 0.9:
                            limit = self.size_limit
                        else:
                            limit = self.size_limit * 0.9
                        if len(self.cache) < limit:
                            break

                        if self.checkpoints:
                            if app_last_block is None:
                                # no app checkpoint constraint
                                checkpoint = self.checkpoints.pop(0)
                            elif app_last_block > self.checkpoints[0]:
                                # app checkpoint ahead of utxo checkpoint
                                checkpoint = self.checkpoints.pop(0)
                            else:
                                break
                        else:
                            # no more checkpoints
                            break

                lb = value[0] >> 39

                self.cache.delete(key)
                self.utxo_records.append((key, value[0], value[2], value[1]))
                self.pending_saved[key] = value
            self.last_checkpoint = self.checkpoint
            self.checkpoint = lb

            self.pending_deleted = deque(self.scheduled_to_delete)
            self.scheduled_to_delete = deque()

            self.log.debug("checkpoint %s cache size %s limit %s" % (self.checkpoint,
                                                                     len(self.cache),
                                                                     limit))
        except Exception as err:
            self.log.critical("create checkpoint error: %s" % str(err))


    async def commit(self):
        # save the tail of the cache to db
        if not self.checkpoint: return
        if self.write_to_db: return

        try:
            self.write_to_db = True
            t = time.time()
            if not self.checkpoint: return
            if self.db_type == "rocksdb":
                await self.loop.run_in_executor(None, self.rocksdb_atomic_batch)
            elif self.db_type == "leveldb":
                await self.loop.run_in_executor(None, self.leveldb_atomic_batch)
            else:
                await self.postgresql_atomic_batch()
            self.log.debug("utxo checkpoint saved time %s" % round(time.time()-t, 4))
            self.saved_utxo_count += len(self.utxo_records)
            self.deleted_utxo_count += len(self.pending_deleted)
            self.pending_deleted = deque()
            self.utxo_records = deque()
            self.pending_saved = dict()

        except Exception as err:
            self.log.critical("save_checkpoint error: %s" % str(err))
        finally:
            self.save_process = False
            self.write_to_db = False


    def rocksdb_atomic_batch(self):
        batch = rocksdb.WriteBatch()
        [batch.delete(k) for k in self.pending_deleted]
        [batch.put(k[0], k[1]) for k in self.utxo_records]
        batch.put(b"last_block", int_to_bytes(self.checkpoint))
        self.db.write(batch)


    def leveldb_atomic_batch(self):
        with self.db.write_batch() as batch:
            [batch.delete(k) for k in self.pending_deleted]
            [batch.put(k[0], k[1]) for k in self.utxo_records]
            batch.put(b"last_block", int_to_bytes(self.checkpoint))


    async def postgresql_atomic_batch(self):
        async with self.db.acquire() as conn:
            async with conn.transaction():
               if self.pending_deleted:
                   await conn.execute("DELETE FROM connector_utxo WHERE "
                                      "outpoint = ANY($1);", self.pending_deleted)
               if self.utxo_records:
                   await conn.copy_records_to_table('connector_utxo',
                                                    columns=["outpoint",
                                                             "pointer",
                                                             "address",
                                                             "amount"],
                                                    records=self.utxo_records)
               if self.p2pkMapHash:
                   await conn.executemany("INSERT INTO connector_p2pk_map (address, script) "
                                          "VALUES ($1, $2) ON CONFLICT DO NOTHING;", self.p2pkMapHash)
                   self.p2pkMapHash = deque()
               await conn.execute("UPDATE connector_utxo_state SET value = $1 "
                                  "WHERE name = 'last_block';", self.checkpoint)
               await conn.execute("UPDATE connector_utxo_state SET value = $1 "
                                  "WHERE name = 'last_cached_block';", self.last_block)


    def len(self):
        return len(self.cache)


    def hit_rate(self):
        if self._requests:
            return self._hit / self._requests
        else:
            return 0
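
load_utxo_from_daemon packs a coin's location into a single integer, pointer = (block_height << 39) + (tx_index << 20) + (1 << 19) + out_index, and create_checkpoint recovers the block height with pointer >> 39. A worked sketch of that layout; the exact field widths below are inferred from the shifts, not documented in the source:

def pack_pointer(block_height, tx_index, out_index):
    # Height above bit 39, tx index above bit 20, bit 19 marks an output,
    # output index in the low 19 bits (widths inferred from the shifts).
    return (block_height << 39) + (tx_index << 20) + (1 << 19) + out_index


def unpack_pointer(pointer):
    block_height = pointer >> 39
    tx_index = (pointer >> 20) & ((1 << 19) - 1)
    out_index = pointer & ((1 << 19) - 1)
    return block_height, tx_index, out_index


p = pack_pointer(650000, 1500, 2)
assert p >> 39 == 650000                       # same check create_checkpoint uses
assert unpack_pointer(p) == (650000, 1500, 2)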
Example #5
class AddressState():
    def __init__(self, dsn, address_state_cache_size, logger):
        setproctitle('btcapi server: addresses state module')
        policy = asyncio.get_event_loop_policy()
        policy.set_event_loop(policy.new_event_loop())
        self.dsn = dsn

        self.address_cache = LRU(address_state_cache_size)
        self.affected_existed = MRU()
        self.affected_new = MRU()
        self.missed_addresses = set()

        self.log = logger
        self.threads = 20
        self.cache_limit = 15000000
        self.active = True
        self.db_pool = None
        self.synchronization_task = None
        self.bootstrap_completed = False
        self.start_time = time.time()
        self.last_block = None
        self.batch_last_block = 0

        self.loaded_addresses = 0
        self.requested_addresses = 0

        self.loop = asyncio.get_event_loop()
        signal.signal(signal.SIGTERM, self.terminate)
        self.loop.create_task(self.start())
        self.loop.run_forever()

    async def start(self):
        try:
            self.db_pool = await asyncpg.create_pool(dsn=self.dsn,
                                                     min_size=1,
                                                     max_size=30)
            self.db_pool_2 = await asyncpg.create_pool(dsn=self.dsn,
                                                       min_size=1,
                                                       max_size=3)
            self.log.info("Addresses state sync module started")
            self.synchronization_task = self.loop.create_task(self.processor())
        except Exception as err:
            self.log.warning("Start addresses state sync module failed: %s" %
                             err)
            await asyncio.sleep(3)
            self.loop.create_task(self.start())

    async def fetch_records(self, query, params):
        async with self.db_pool_2.acquire() as conn:
            rows = await conn.fetch(query, *params)
        return rows

    async def get_records(self, height, limit):
        q = time.time()
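        # Three record streams for the same block range: spending records from
        # stxo keyed by s_pointer, the matching spent outputs from stxo keyed by
        # pointer, and still-unspent outputs from connector_utxo keyed by pointer.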
        stxo_t = self.loop.create_task(
            self.fetch_records(
                "SELECT address, s_pointer as p, amount as a FROM "
                "stxo WHERE s_pointer >= $1 and s_pointer < $2 "
                "order by s_pointer asc",
                ((height + 1) << 39, (height + 1 + limit + 1) << 39)))

        q = time.time()
        ustxo_t = self.loop.create_task(
            self.fetch_records(
                "SELECT address, pointer as p, amount as a "
                "FROM  stxo WHERE "
                "pointer >= $1 and pointer < $2  "
                "order by pointer asc",
                ((height + 1) << 39, (height + 1 + limit + 1) << 39)))

        q = time.time()
        utxo_t = self.loop.create_task(
            self.fetch_records(
                "SELECT address, pointer as p, amount  as a FROM "
                "connector_utxo WHERE  pointer >= $1 and pointer < $2 "
                "order by pointer asc",
                ((height + 1) << 39, (height + 1 + limit + 1) << 39)))

        q = time.time()
        await asyncio.wait({stxo_t, ustxo_t, utxo_t})

        stxo = stxo_t.result()
        utxo = utxo_t.result()
        ustxo = ustxo_t.result()
        return stxo, utxo, ustxo, height, limit

    async def processor(self):
        blockchain_stat = {
            "inputs": 0,
            "outputs": 0,
            "reused": 0,
            "amountMap": dict()
        }
        height = -1
        previous_height = -1
        last_block_height = -1
        limit = 200
        next_batch = None
        address_cache = self.address_cache
        affected_new = self.affected_new
        affected_existed = self.affected_existed
        block_stat_records = []
        blockchain_stat_records = []
        batch_new = []
        batch_existed = []
        commit_task = False

        while True:
            try:

                qt = time.time()

                ql = time.time()

                if block_stat_records:
                    commit = self.loop.create_task(
                        self.commit_changes(previous_height,
                                            block_stat_records,
                                            blockchain_stat_records, batch_new,
                                            batch_existed))
                    commit_task = True
                block_stat_records = []

                async with self.db_pool.acquire() as conn:

                    if not self.bootstrap_completed:
                        v = await conn.fetchval(
                            "SELECT value FROM service WHERE name = 'bootstrap_completed' LIMIT 1;"
                        )
                        if v == '1':
                            self.bootstrap_completed = True
                        else:
                            await asyncio.sleep(10)
                            continue

                    async with conn.transaction():
                        h = await conn.fetchval(
                            "SELECT height  FROM  blocks  order by height desc LIMIT 1;"
                        )
                        if h is None:
                            await asyncio.sleep(1)
                            continue
                        max_h = h

                        if height == -1:
                            row = await conn.fetchrow(
                                "SELECT height, addresses as b FROM  "
                                "blockchian_address_stat  order by height desc LIMIT 1;"
                            )
                            if row is not None:
                                blockchain_stat = json.loads(row["b"])
                                height = row["height"]
                        else:
                            height = last_block_height

                    if height + limit > max_h:
                        limit = max_h - height - 1

                if next_batch is None:
                    stxo, utxo, ustxo, height, recent_limit = await self.get_records(
                        height, limit)
                else:
                    await next_batch
                    stxo, utxo, ustxo, height, recent_limit = next_batch.result()
                last_block_height = height + 1 + recent_limit
                first_block_height = height + 1

                if last_block_height + limit > max_h:
                    limit = last_block_height - height - 1
                if last_block_height > max_h:
                    last_block_height = max_h
                next_batch = self.loop.create_task(
                    self.get_records(last_block_height, limit))

                ql = round(time.time() - ql, 2)

                if not stxo and not utxo and not ustxo:
                    try:
                        await commit
                    except:
                        pass
                    next_batch = None
                    limit = 1
                    async with self.db_pool.acquire() as conn:
                        i = await conn.fetchval(
                            "select count(*)  from pg_indexes"
                            " where  indexname = 'address_rich_list'")
                        if i == 0:
                            self.log.warning(
                                "Create index on address table ...")
                            await conn.fetchval(
                                "CREATE INDEX IF NOT EXISTS  "
                                " address_rich_list ON address (balance DESC);"
                            )
                            self.log.warning(
                                "Create index on address completed")
                    # clear cache
                    self.address_cache.clear()
                    self.affected_existed.clear()
                    self.affected_new.clear()
                    self.missed_addresses = set()
                    await asyncio.sleep(1)
                    continue

                qg = time.time()
                block_stat_records = []
                blockchain_stat_records = []
                self.missed_addresses = set()
                missed_addresses = self.missed_addresses

                for w in (stxo, utxo, ustxo):
                    for i in w:
                        if i["address"][0] in (0, 1, 2, 5, 6):
                            if not address_cache.get(i["address"]):
                                missed_addresses.add(i["address"])

                l_records = len(stxo) + len(utxo) + len(ustxo)
                if l_records > address_cache.get_size():
                    address_cache.set_size(l_records)

                len_missed_address = len(missed_addresses)

                await self.load_addresses()
                qg = round(time.time() - qg, 2)

                qc = time.time()
                before_block_balance = dict()
                after_block_balance = dict()
                new_addresses = set()
                input_addresses = set()
                out_addresses = set()
                tx_address = dict()
                ytx_address = dict()
                ztx_address = dict()

                inputs_first_used = 0
                inputs_reused = 0
                inputs_new = 0
                outs_new = 0

                i, y, z = -1, -1, -1
                ihl, yhl, zhl = first_block_height, first_block_height, first_block_height
                itxl, itx, ztx = -1, -1, -1
                ytxl, ytx, ztxl = -1, -1, -1
                zh = first_block_height

                while True:
                    try:
                        i += 1
                        inp = stxo[i]
                        itx = inp["p"] >> 20
                        ih = inp["p"] >> 39
                        ia = inp["address"]
                    except:
                        i = None
                        ih = last_block_height + 1

                    is_new_block = ih != ihl or i is None
                    is_new_tx = (itxl > -1 and itx != itxl) or i is None

                    if is_new_tx:
                        # save previous tx data
                        for addr in tx_address:
                            try:
                                rc, ra, c, frp, lra, lrp, \
                                sc, sa, cd, fsp, lsa, lsp = address_cache[addr]
                            except:
                                rc, ra, c, frp, lra, lrp, sc, sa, cd, fsp, lsa, lsp = (
                                    0, 0, 0, None, None, None, 0, 0, 0, None,
                                    None, None)

                            if addr not in before_block_balance:
                                before_block_balance[addr] = [rc, ra - sa]
                            sc, sa, cd = sc + 1, sa + tx_address[addr][
                                1], cd + tx_address[addr][0]
                            if fsp is None:
                                fsp, inputs_first_used = itxl, inputs_first_used + 1
                            if sc == 2:
                                inputs_reused += 1
                            if sc == 1:
                                inputs_new += 1
                            lsp = itxl
                            lsa = tx_address[addr][1]
                            address_cache[addr] = (rc, ra, c, frp, lra, lrp,
                                                   sc, sa, cd, fsp, lsa, lsp)
                            if rc == 0:
                                new_addresses.add(addr)
                            if addr in new_addresses:
                                affected_new[addr] = (rc, ra, c, frp, lra, lrp,
                                                      sc, sa, cd, fsp, lsa,
                                                      lsp)
                            else:
                                affected_existed[addr] = (rc, ra, c, frp, lra,
                                                          lrp, sc, sa, cd, fsp,
                                                          lsa, lsp)
                            after_block_balance[addr] = ra - sa
                        tx_address = dict()

                    if is_new_block:
                        while True:
                            try:
                                y += 1
                                out = utxo[y]
                                yh = out["p"] >> 39
                                ytx = out["p"] >> 20
                                ya = out["address"]
                            except:
                                y = None
                                yh = last_block_height + 1

                            is_new_block = yh != yhl or y is None
                            is_new_tx = (ytxl > -1
                                         and ytx != ytxl) or y is None

                            if is_new_tx:
                                # save previous tx data
                                for addr in ytx_address:
                                    try:
                                        rc, ra, c, frp, lra, lrp, \
                                        sc, sa, cd, fsp, lsa, lsp = address_cache[addr]
                                    except:
                                        rc, ra, c, frp, lra, lrp, sc, sa, cd, fsp, lsa, lsp = (
                                            0, 0, 0, None, None, None, 0, 0, 0,
                                            None, None, None)

                                    if addr not in before_block_balance:
                                        before_block_balance[addr] = [
                                            rc, ra - sa
                                        ]
                                    coins, amount = ytx_address[addr]
                                    rc += 1
                                    ra += amount
                                    c += coins
                                    if frp is None:
                                        frp = ytxl
                                        new_addresses.add(addr)
                                        outs_new += 1
                                    lrp = ytxl
                                    lra = amount
                                    address_cache[addr] = (rc, ra, c, frp, lra,
                                                           lrp, sc, sa, cd,
                                                           fsp, lsa, lsp)
                                    if addr in new_addresses:
                                        affected_new[addr] = (rc, ra, c, frp,
                                                              lra, lrp, sc, sa,
                                                              cd, fsp, lsa,
                                                              lsp)
                                    else:
                                        affected_existed[addr] = (rc, ra, c,
                                                                  frp, lra,
                                                                  lrp, sc, sa,
                                                                  cd, fsp, lsa,
                                                                  lsp)
                                    after_block_balance[addr] = ra - sa
                                ytx_address = dict()

                            if is_new_block:
                                while True:
                                    try:
                                        z += 1
                                        out2 = ustxo[z]
                                        zh = out2["p"] >> 39
                                        ztx = out2["p"] >> 20
                                        za = out2["address"]
                                    except:
                                        z = None
                                        zh = last_block_height + 1
                                    is_new_block = zh != zhl or z is None
                                    is_new_tx = (ztxl > -1
                                                 and ztx != ztxl) or z is None

                                    if is_new_tx:
                                        # save previous tx data
                                        for addr in ztx_address:
                                            try:
                                                rc, ra, c, frp, lra, lrp, \
                                                sc, sa, cd, fsp, lsa, lsp = address_cache[addr]
                                            except:
                                                rc, ra, c, frp, lra, lrp, sc, sa, cd, fsp, lsa, lsp = (
                                                    0, 0, 0, None, None, None,
                                                    0, 0, 0, None, None, None)

                                            if addr not in before_block_balance:
                                                before_block_balance[addr] = [
                                                    rc, ra - sa
                                                ]
                                            coins, amount = ztx_address[addr]
                                            rc += 1
                                            ra += amount
                                            c += coins
                                            if frp is None:
                                                frp = ztxl
                                                new_addresses.add(addr)
                                                outs_new += 1
                                            lrp = ztxl
                                            lra = amount
                                            address_cache[addr] = (rc, ra, c,
                                                                   frp, lra,
                                                                   lrp, sc, sa,
                                                                   cd, fsp,
                                                                   lsa, lsp)
                                            if addr in new_addresses:
                                                affected_new[addr] = (rc, ra,
                                                                      c, frp,
                                                                      lra, lrp,
                                                                      sc, sa,
                                                                      cd, fsp,
                                                                      lsa, lsp)
                                            else:
                                                affected_existed[addr] = (
                                                    rc, ra, c, frp, lra, lrp,
                                                    sc, sa, cd, fsp, lsa, lsp)
                                            after_block_balance[addr] = ra - sa
                                        ztx_address = dict()

                                    if is_new_block:
                                        while True:
                                            block_stat = {
                                                "inputs": {
                                                    "count": {
                                                        "total":
                                                        len(input_addresses),
                                                        "reused":
                                                        inputs_reused
                                                    }
                                                },
                                                "outputs": {
                                                    "count": {
                                                        "total":
                                                        len(out_addresses),
                                                        "new": outs_new
                                                    }
                                                },
                                                "total":
                                                len(input_addresses
                                                    | out_addresses)
                                            }
                                            blockchain_stat[
                                                "outputs"] += outs_new
                                            blockchain_stat[
                                                "inputs"] += inputs_new
                                            blockchain_stat[
                                                "reused"] += inputs_reused

                                            for amount in after_block_balance.values(
                                            ):
                                                if amount < 0:
                                                    print(amount)
                                                key = 'null' if amount == 0 else str(
                                                    floor(log10(amount)))
                                                try:
                                                    blockchain_stat[
                                                        "amountMap"][key][
                                                            "amount"] += amount
                                                    blockchain_stat[
                                                        "amountMap"][key][
                                                            "count"] += 1
                                                except:
                                                    blockchain_stat[
                                                        "amountMap"][key] = {
                                                            "amount": amount,
                                                            "count": 1
                                                        }

                                            for v in before_block_balance.values(
                                            ):
                                                if v[0] == 0:
                                                    continue
                                                amount = v[1]
                                                key = 'null' if amount == 0 else str(
                                                    floor(log10(amount)))
                                                blockchain_stat["amountMap"][
                                                    key]["amount"] -= amount
                                                blockchain_stat["amountMap"][
                                                    key]["count"] -= 1
                                            block_stat_records.append(
                                                (ihl, json.dumps(block_stat)))
                                            blockchain_stat_records.append(
                                                (ihl,
                                                 json.dumps(blockchain_stat)))
                                            ihl += 1
                                            if yhl < ihl:
                                                yhl += 1
                                            if zhl < yhl:
                                                zhl += 1
                                            input_addresses = set()
                                            out_addresses = set()
                                            inputs_reused = 0
                                            inputs_new = 0
                                            outs_new = 0
                                            after_block_balance = dict()
                                            before_block_balance = dict()
                                            if zhl >= zh or yhl >= yh:
                                                break
                                        await asyncio.sleep(0)

                                    if z is not None:
                                        # handle output record
                                        if ihl == ih:
                                            z -= 1
                                            break
                                        if yh < zh:
                                            z -= 1
                                            break

                                        if za[0] in (0, 1, 2, 5, 6):
                                            out_addresses.add(za)
                                            try:
                                                # [coins, amount]
                                                ztx_address[za][0] += 1
                                                ztx_address[za][1] += out2["a"]
                                            except:
                                                ztx_address[za] = [
                                                    1, out2["a"]
                                                ]
                                        ztxl = ztx

                                    if is_new_block:
                                        if ih < zh:
                                            break
                                        if z is None:
                                            break

                            if ih <= yhl:
                                if y is None:
                                    y = len(utxo)
                                else:
                                    y -= 1
                                break

                            if is_new_block:
                                if yhl >= yh and yhl >= ih:
                                    break

                            # handle output record
                            if yh <= zh:
                                if y is not None and ya[0] in (0, 1, 2, 5, 6):
                                    out_addresses.add(ya)
                                    try:
                                        # [coins, amount]
                                        ytx_address[ya][0] += 1
                                        ytx_address[ya][1] += out["a"]
                                    except:
                                        ytx_address[ya] = [1, out["a"]]
                                ytxl = ytx
                            else:
                                if y is None:
                                    y = len(utxo)
                                else:
                                    y -= 1
                    # handle input record
                    if i is not None and ia[0] in (0, 1, 2, 5, 6):
                        input_addresses.add(ia)
                        try:
                            # [coins_destroyed, amount]
                            tx_address[ia][0] += 1
                            tx_address[ia][1] += inp["a"]
                        except:
                            tx_address[ia] = [1, inp["a"]]
                    itxl = itx

                    if is_new_block:
                        if ihl > ih or i is None:
                            break

                batch_new = deque()
                affected_new_pop = affected_new.pop
                batch_new_append = batch_new.append
                while affected_new:
                    a, v = affected_new_pop()
                    balance = v[1] - v[7]
                    data = serialize_address_data(*v)
                    batch_new_append((a, balance, data))
                batch_existed = deque()

                affected_existed_pop = affected_existed.pop
                batch_existed_append = batch_existed.append
                while affected_existed:
                    a, v = affected_existed_pop()
                    balance = v[1] - v[7]
                    data = serialize_address_data(*v)
                    batch_existed_append((a, balance, data))
                qc = round(time.time() - qc, 2)

                qs = time.time()
                previous_height = height
                if commit_task:
                    height = await commit
                else:
                    height = last_block_height

                qs = round(time.time() - qs, 2)

                qt = round(time.time() - qt, 2)
                self.log.debug(
                    "Address state processor round %s; Get records %s; Load addresses %s; Computation %s; Save %s"
                    % (qt, ql, qg, qc, qs))
                self.log.info(
                    "Address state/analytica +%s blocks; last block %s;" %
                    (last_block_height - first_block_height + 1,
                     last_block_height))
                if l_records > 2000000:
                    if limit > 10:
                        limit -= 2
                elif len_missed_address < 200000 and limit < 1000:
                    limit += 2

            except asyncio.CancelledError:
                self.log.warning("Addresses state task canceled")
                break
            except Exception as err:
                self.address_cache.clear()
                self.affected_existed.clear()
                self.affected_new.clear()
                self.missed_addresses = set()
                block_stat_records = []
                height = -1

                try:
                    commit.cancel()
                    await commit
                except:
                    pass

                commit_task = False
                next_batch = None
                self.log.error("Addresses state task error: %s" % err)
                print(traceback.format_exc())

                await asyncio.sleep(1)

    async def commit_changes(self, height, block_stat_records,
                             blockchain_stat_records, batch_new,
                             batch_existed):
        async with self.db_pool.acquire() as conn:
            c = await conn.fetchval(
                " SELECT n_dead_tup FROM pg_stat_user_tables  "
                " WHERE relname = 'address' LIMIT 1;")
            if c and c > 100000000:
                qm = time.time()
                self.log.warning("Address table maintenance ...")
                await conn.execute("VACUUM FULL address;")
                await conn.execute("ANALYZE address;")

                self.log.warning("Address table maintenance completed %s" %
                                 round(time.time() - qm, 2))

            async with conn.transaction():
                check_height = await conn.fetchval(
                    "SELECT height FROM  "
                    "blockchian_address_stat  order by height desc LIMIT 1 FOR UPDATE;"
                )

                if check_height is None:
                    check_height = -1

                if check_height != height:
                    # data changed during processing, recalculate again
                    raise Exception("data changed")
                await conn.copy_records_to_table(
                    'block_address_stat',
                    columns=["height", "addresses"],
                    records=block_stat_records)

                await conn.copy_records_to_table(
                    'blockchian_address_stat',
                    columns=["height", "addresses"],
                    records=blockchain_stat_records)
                await conn.copy_records_to_table(
                    'address',
                    columns=["address", "balance", "data"],
                    records=batch_new)
                await conn.execute(
                    """
                                      UPDATE address SET data = r.data,  balance = r.balance 
                                      FROM 
                                      (SELECT address, balance, data FROM UNNEST($1::Address[])) AS r 
                                      WHERE  address.address = r.address;
                                   """, batch_existed)
                check_height = await conn.fetchval(
                    "SELECT height FROM  "
                    "blockchian_address_stat  order by height desc LIMIT 1 FOR UPDATE;"
                )

                return check_height

    async def load_addresses_batch(self, batch):
        async with self.db_pool.acquire() as conn:
            rows = await conn.fetch(
                "SELECT  address, data FROM address WHERE address = ANY($1);",
                batch)
            for row in rows:
                self.address_cache[row["address"]] = deserialize_address_data(
                    row["data"])
        return len(rows)

    async def load_addresses(self):
        q = time.time()
        self.log.debug("Request %s addresses from db ... " %
                       len(self.missed_addresses))
        self.requested_addresses += len(self.missed_addresses)
        batches = chunks(self.missed_addresses, self.threads, 10000)
        tasks = [
            self.loop.create_task(self.load_addresses_batch(b))
            for b in batches
        ]
        count = 0
        if tasks:
            await asyncio.wait(tasks)
            for task in tasks:
                count += task.result()

        self.loaded_addresses += count
        self.log.debug("Loaded %s addresses [%s]" %
                       (count, round(time.time() - q, 2)))

    def terminate(self, a, b):
        self.loop.create_task(self.terminate_coroutine())

    async def terminate_coroutine(self):
        self.active = False
        if self.synchronization_task:
            self.synchronization_task.cancel()
            r = await self.synchronization_task
            try:
                r.result()
            except:
                pass
        self.log.info("address state module stopped")
        self.loop.stop()
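
Throughout processor() each address maps to a 12-value tuple, and the batches written by commit_changes store balance = v[1] - v[7]. A sketch of that layout as it can be read off the code above; the field names are an inference, not taken from the project:

from collections import namedtuple

# Field names inferred from how processor() updates each slot.
AddressRecord = namedtuple("AddressRecord", [
    "received_tx_count",       # rc
    "received_amount",         # ra
    "coins_received",          # c
    "first_received_pointer",  # frp
    "last_received_amount",    # lra
    "last_received_pointer",   # lrp
    "sent_tx_count",           # sc
    "sent_amount",             # sa
    "coins_destroyed",         # cd
    "first_sent_pointer",      # fsp
    "last_sent_amount",        # lsa
    "last_sent_pointer",       # lsp
])

empty = AddressRecord(0, 0, 0, None, None, None, 0, 0, 0, None, None, None)
balance = empty.received_amount - empty.sent_amount  # v[1] - v[7] in the code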
Example #6
try:
    if config["SERVER"]["api_debug_mode"] == "on":
        dbg = True

except:
    pass

app = web.Application()

app['dsn'] = postgres_dsn
app['debug'] = dbg
app['pool_threads'] = int(pool_threads)
app['log'] = logger
app['testnet'] = testnet
app["merkle_tree_cache"] = LRU(1000)
app["block_transactions"] = LRU(500)
app["block_transaction_id_list"] = LRU(500)
app["rpc"] = None

try:
    app["transaction"] = True if config["OPTIONS"][
        "transaction"] == "on" else False
    app["merkle_proof"] = True if config["OPTIONS"][
        "merkle_proof"] == "on" else False
    app["address_state"] = True if config["OPTIONS"][
        "address_state"] == "on" else False
    app["address_timeline"] = True if config["OPTIONS"][
        "address_timeline"] == "on" else False
    app["blocks_data"] = True if config["OPTIONS"][
        "blocks_data"] == "on" else False