async def message_loop(self):
    # Worker-side service loop: optionally attach a single-connection db pool,
    # open the IPC pipe reader/writer, then dispatch parent messages until the
    # pipe closes or the task fails.
    try:
        if self.dsn:
            # one connection is enough: this worker serializes its db access
            self.db = await asyncpg.create_pool(dsn=self.dsn, min_size=1, max_size=1)
        self.reader = await self.get_pipe_reader(self.in_reader)
        self.writer = await self.get_pipe_writer(self.out_writer)
        while True:
            msg_type, msg = await self.pipe_get_msg(self.reader)
            if msg_type == b'pipe_read_error':
                # parent end of the pipe is gone -> stop the worker loop
                return
            if msg_type == b'get':
                # request to start preloading blocks from the given height
                self.loop.create_task(self.load_blocks(bytes_to_int(msg),
                                                       self.rpc_batch_limit))
                continue
            if msg_type == b'rpc_batch_limit':
                self.rpc_batch_limit = bytes_to_int(msg)
                continue
            if msg_type == b'target_height':
                self.target_height = bytes_to_int(msg)
                continue
    except Exception:
        # Best-effort loop: any failure simply ends it. Was a bare `except:`,
        # which also swallowed asyncio.CancelledError / KeyboardInterrupt;
        # `except Exception` lets task cancellation and interpreter shutdown
        # propagate (CancelledError is a BaseException on Python 3.8+).
        pass
async def load_utxo_from_daemon(self):
    """Restore missed/failed utxo records by querying the bitcoind daemon.

    For every missed outpoint (32-byte tx id + output index), fetch the raw
    transaction and its containing block over RPC, rebuild the utxo pointer
    and address, and store the result in ``self.loaded``.
    """
    if not self.missed_failed:
        return
    # query the daemon in batches of 50 outpoints
    missed = chunks_by_count(self.missed_failed, 50)
    for m in missed:
        result = await self.rpc.batch([["getrawtransaction", rh2s(i[:32]), 1] for i in m])
        # collect containing blocks not yet in the local cache
        hash_list = set()
        for r in result:
            if r["result"]["blockhash"] not in self.restore_blocks_cache:
                hash_list.add(r["result"]["blockhash"])
        result2 = await self.rpc.batch([["getblock", bh] for bh in hash_list])
        for r in result2:
            self.restore_blocks_cache[r["result"]["hash"]] = r["result"]
        for key, r in zip(m, result):
            out_index = bytes_to_int(key[32:])
            tx = r["result"]
            # BUG FIX: round() instead of int() — BTC values arrive as floats
            # and int() truncates (e.g. 0.1 * 1e8 == 9999999.99...), producing
            # an off-by-one satoshi amount
            amount = round(tx["vout"][out_index]["value"] * 100000000)
            script = parse_script(tx["vout"][out_index]["scriptPubKey"]["hex"])
            try:
                address = b"".join((bytes([script["nType"]]), script["addressHash"]))
            except KeyError:
                # script types without an address hash fall back to the raw script
                address = b"".join((bytes([script["nType"]]), script["script"]))
            block = self.restore_blocks_cache[tx["blockhash"]]
            tx_index = block["tx"].index(tx["txid"])
            block_height = block["height"]
            # pointer layout: height << 39 | tx index << 20 | output flag << 19 | out index
            pointer = (block_height << 39) + (tx_index << 20) + (1 << 19) + out_index
            self.loaded[key] = (pointer, amount, address)
    self.missed_failed = list()
    # BUG FIX: dict.pop() with no argument is a TypeError on dicts;
    # evict the oldest-inserted entries (FIFO) to bound the cache size
    while len(self.restore_blocks_cache) > 1000:
        del self.restore_blocks_cache[next(iter(self.restore_blocks_cache))]
async def message_loop(self, index):
    # Supervisor-side loop for worker `index`: read messages from the worker's
    # pipe and fold preloaded blocks / failure reports into parent state.
    while True:
        msg_type, msg = await self.pipe_get_msg(self.worker[index].reader)
        if msg_type == b'pipe_read_error':
            # worker pipe closed -> stop watching this worker
            return
        if msg_type == b'result':
            # worker finished a batch; mark it free to take new work
            self.worker_busy[index] = False
            # NOTE(review): pickle.loads on pipe data — safe only because the
            # peer is a trusted child process of this application
            blocks = pickle.loads(msg)
            if blocks:
                # track serialized batch size (bytes), not block count
                self.last_batch_size = len(msg)
            else:
                # empty batch -> throttle the rpc batch size back to 40
                self.rpc_batch_limit = 40
            for i in blocks:
                self.parent.block_preload.set(i, blocks[i])
            if blocks:
                # `i` is the last (highest) preloaded height from the loop above;
                # the `if blocks:` guard keeps `i` from being referenced unbound
                if self.parent.utxo_data:
                    if self.parent.sync_utxo.checkpoints:
                        # only extend checkpoints monotonically
                        if self.parent.sync_utxo.checkpoints[-1] < i:
                            self.parent.sync_utxo.checkpoints.append(i)
                            self.reached_height = i
                    else:
                        # seed the checkpoint list on first batch
                        self.parent.sync_utxo.checkpoints.append(i)
        if msg_type == b'failed':
            self.height = bytes_to_int(msg)
            self.log.debug("failed load block %s" % self.height)
            continue
async def apply_block_changes(self, txs, h, conn):
    """Promote mempool state to confirmed state for block at height ``h``.

    Moves coins created/destroyed by the block's transactions from the
    unconfirmed tables into the confirmed ones, unwinds double-spent
    (invalidated) mempool chains, and records an undo checkpoint.

    :param txs: ordered list of tx ids included in the block
    :param h: block height
    :param conn: open asyncpg connection (caller manages the transaction)
    :return: dict with dbs_uutxo / dbs_stxo / invalid_txs / stxo deques
    """
    if self.db_type == "postgresql":
        # tx id -> position in block; avoids O(n^2) txs.index() in the loops below
        tx_position = {t: n for n, t in enumerate(txs)}

        # handle block new coins: promote p2pk address map records
        rows = await conn.fetch("DELETE FROM connector_unconfirmed_p2pk_map "
                                "WHERE tx_id = ANY($1) "
                                "RETURNING address, script, tx_id;", txs)
        p2pk_map = deque()
        p2pk_map_backup = deque()
        for row in rows:
            p2pk_map.append((row["address"], row["script"]))
            p2pk_map_backup.append((row["tx_id"], row["address"], row["script"]))
        if p2pk_map:
            await conn.executemany("INSERT INTO connector_p2pk_map (address, script) "
                                   "VALUES ($1, $2) ON CONFLICT DO NOTHING;", p2pk_map)

        # remove all block created coins from connector_unconfirmed_utxo table
        # add new coins to connector_utxo table
        rows = await conn.fetch("DELETE FROM connector_unconfirmed_utxo "
                                "WHERE out_tx_id = ANY($1) "
                                "RETURNING outpoint, "
                                "          out_tx_id as t,"
                                "          address, "
                                "          amount;", txs)
        batch, uutxo = deque(), deque()
        for r in rows:
            # pointer layout: height << 39 | tx index << 20 | output flag << 19 | out index
            # BUG FIX: the output index lives in the outpoint bytes (after the
            # 32-byte tx id); `r[32:]` sliced the asyncpg Record itself
            batch.append((r["outpoint"],
                          (h << 39) + (tx_position[r["t"]] << 20) + (1 << 19) +
                          bytes_to_int(r["outpoint"][32:]),
                          r["address"],
                          r["amount"]))
            uutxo.append((r["outpoint"], r["t"], r["address"], r["amount"]))
        await conn.copy_records_to_table('connector_utxo',
                                         columns=["outpoint", "pointer",
                                                  "address", "amount"],
                                         records=batch)

        # handle block destroyed coins
        # remove all destroy records from connector_unconfirmed_stxo
        rows = await conn.fetch("DELETE FROM connector_unconfirmed_stxo "
                                "WHERE tx_id = ANY($1) "
                                "RETURNING outpoint,"
                                "          sequence,"
                                "          out_tx_id,"
                                "          tx_id,"
                                "          input_index as i,"
                                "          address as a;", txs)
        stxo = deque()
        utxo = deque()
        outpoints = set()
        for r in rows:
            stxo.append((r["outpoint"], r["sequence"], r["out_tx_id"],
                         r["tx_id"], r["i"], r["a"]))
            outpoints.add(r["outpoint"])
        if outpoints:
            rows = await conn.fetch("DELETE FROM connector_utxo WHERE outpoint = ANY($1) "
                                    "RETURNING outpoint, pointer, address, amount;",
                                    outpoints)
            for r in rows:
                # save deleted utxo except utxo created in recent block
                if r["pointer"] >> 39 < h:
                    utxo.append((r["outpoint"], r["pointer"], r["address"], r["amount"]))

        # delete dbs (double-spend) records: mempool inputs that spent the
        # same outpoints the block just spent are now invalid
        dbs_stxo = deque()
        dbs_uutxo = deque()
        invalid_txs = set()
        if outpoints:
            rows = await conn.fetch("DELETE FROM connector_unconfirmed_stxo "
                                    "WHERE outpoint = ANY($1) "
                                    "RETURNING outpoint,"
                                    "          sequence,"
                                    "          out_tx_id,"
                                    "          tx_id,"
                                    "          input_index as i, "
                                    "          address as a;", outpoints)
            for r in rows:
                dbs_stxo.append((r["outpoint"], r["sequence"], r["out_tx_id"],
                                 r["tx_id"], r["i"], r["a"]))
                invalid_txs.add(r["tx_id"])

        # handle invalid transactions while invalid transactions list not empty
        # remove coins for transactions list from connector_unconfirmed_utxo
        # remove destroy records for transactions list from connector_unconfirmed_stxo
        # get new invalid transactions list from deleted records
        block_invalid_txs = set()
        while invalid_txs:
            block_invalid_txs = block_invalid_txs | invalid_txs
            rows = await conn.fetch("DELETE FROM connector_unconfirmed_utxo "
                                    "WHERE out_tx_id = ANY($1) "
                                    "RETURNING outpoint, "
                                    "          out_tx_id as t,"
                                    "          address, "
                                    "          amount;", invalid_txs)
            outpoints = set()
            for r in rows:
                dbs_uutxo.append((r["outpoint"], r["t"], r["address"], r["amount"]))
                outpoints.add(r["outpoint"])
            rows = await conn.fetch("DELETE FROM connector_unconfirmed_stxo "
                                    "WHERE outpoint = ANY($1) "
                                    "RETURNING outpoint,"
                                    "          sequence,"
                                    "          out_tx_id,"
                                    "          tx_id,"
                                    "          input_index as i,"
                                    "          address as a;", outpoints)
            # descendants of the removed outputs become the next invalid wave
            invalid_txs = set()
            for r in rows:
                dbs_stxo.append((r["outpoint"], r["sequence"], r["out_tx_id"],
                                 r["tx_id"], r["i"], r["a"]))
                invalid_txs.add(r["tx_id"])

        # persist an undo checkpoint for reorg handling
        await conn.execute("INSERT INTO connector_block_state_checkpoint (height, data) "
                           "VALUES ($1, $2);", h,
                           pickle.dumps({"utxo": utxo,
                                         "uutxo": uutxo,
                                         "stxo": stxo,
                                         "dbs_uutxo": dbs_uutxo,
                                         "dbs_stxo": dbs_stxo,
                                         "invalid_txs": block_invalid_txs,
                                         "p2pk_map": p2pk_map_backup}))
        return {"dbs_uutxo": dbs_uutxo,
                "dbs_stxo": dbs_stxo,
                "invalid_txs": block_invalid_txs,
                "stxo": stxo}
async def apply_block_changes(self, txs, h, conn):
    """Promote mempool state to confirmed state for block at height ``h``.

    Moves coins created/destroyed by the block's transactions from the
    unconfirmed tables into the confirmed ones, unwinds double-spent
    (invalidated) mempool chains, records an undo checkpoint and builds
    per-transaction address filters.

    :param txs: ordered list of tx ids included in the block (txs[0] is coinbase)
    :param h: block height
    :param conn: open asyncpg connection (caller manages the transaction)
    :return: dict with invalid uutxo/stxo, invalid tx set, stxo/utxo deques,
             tx_filters (tx index -> set of addresses), coinbase tx id and
             total new-coin amount of the block
    """
    tx_filters = dict()
    # tx id -> position in block; avoids O(n^2) txs.index() in the loops below
    tx_position = {t: n for n, t in enumerate(txs)}

    # handle block new coins: promote p2pk address map records
    rows = await conn.fetch(
        "DELETE FROM connector_unconfirmed_p2pk_map "
        "WHERE tx_id = ANY($1) "
        "RETURNING address, script, tx_id;", txs)
    p2pk_map = deque()
    p2pk_map_backup = deque()
    for row in rows:
        p2pk_map.append((row["address"], row["script"]))
        p2pk_map_backup.append(
            (row["tx_id"], row["address"], row["script"]))
    if p2pk_map:
        await conn.executemany(
            "INSERT INTO connector_p2pk_map (address, script) "
            "VALUES ($1, $2) ON CONFLICT DO NOTHING;", p2pk_map)

    # remove all block created coins from connector_unconfirmed_utxo table
    # add new coins to connector_utxo table
    rows = await conn.fetch(
        "DELETE FROM connector_unconfirmed_utxo "
        "WHERE out_tx_id = ANY($1) "
        "RETURNING outpoint, "
        "          out_tx_id as t,"
        "          address, "
        "          amount;", txs)
    batch, uutxo = deque(), deque()
    block_amount = 0
    for r in rows:
        # pointer layout: height << 39 | tx index << 20 | output flag << 19 | out index
        batch.append(
            (r["outpoint"],
             (h << 39) + (tx_position[r["t"]] << 20) + (1 << 19) +
             bytes_to_int(r["outpoint"][32:]),
             r["address"],
             r["amount"]))
        uutxo.append((r["outpoint"], r["t"], r["address"], r["amount"]))
        block_amount += r["amount"]
        if self.block_filters:
            # setdefault replaces the try/.add()/except-init idiom
            tx_filters.setdefault(tx_position[r["t"]], set()).add(r["address"])
    await conn.copy_records_to_table(
        'connector_utxo',
        columns=["outpoint", "pointer", "address", "amount"],
        records=batch)

    # handle block destroyed coins
    # remove all destroy records from connector_unconfirmed_stxo
    rows = await conn.fetch(
        "DELETE FROM connector_unconfirmed_stxo "
        "WHERE tx_id = ANY($1) "
        "RETURNING outpoint,"
        "          sequence,"
        "          out_tx_id,"
        "          tx_id,"
        "          input_index as i,"
        "          address as a,"
        "          amount as am,"
        "          pointer as pt;", txs)
    stxo, utxo, outpoints = deque(), deque(), set()
    for r in rows:
        stxo.append((r["outpoint"], r["sequence"], r["out_tx_id"],
                     r["tx_id"], r["i"], r["a"], r["am"], r["pt"]))
        outpoints.add(r["outpoint"])
        if self.block_filters:
            # BUG FIX: the fallback initialised a *list* ([r["a"]]) here while
            # the success path used .add(); a later .add() on the list raised
            # AttributeError, was swallowed by the bare except and replaced the
            # entry, silently dropping addresses. Always use a set.
            tx_filters.setdefault(tx_position[r["tx_id"]], set()).add(r["a"])
    if outpoints:
        rows = await conn.fetch(
            "DELETE FROM connector_utxo WHERE outpoint = ANY($1) "
            "RETURNING outpoint, pointer, address, amount;", outpoints)
        for r in rows:
            # save deleted utxo except utxo created in recent block
            if r["pointer"] >> 39 < h:
                utxo.append((r["outpoint"], r["pointer"],
                             r["address"], r["amount"]))

    # delete dbs (double-spend) records: mempool inputs that spent the
    # same outpoints the block just spent are now invalid
    dbs_stxo, dbs_uutxo, block_invalid_txs = deque(), deque(), set()
    if outpoints:
        rows = await conn.fetch(
            "DELETE FROM connector_unconfirmed_stxo "
            "WHERE outpoint = ANY($1) "
            "RETURNING outpoint,"
            "          sequence,"
            "          out_tx_id,"
            "          tx_id,"
            "          input_index as i, "
            "          address as a,"
            "          amount as am,"
            "          pointer as pt;", outpoints)
        for r in rows:
            dbs_stxo.append((r["outpoint"], r["sequence"], r["out_tx_id"],
                             r["tx_id"], r["i"], r["a"], r["am"], r["pt"]))
            block_invalid_txs.add(r["tx_id"])

    # handle invalid transactions while invalid transactions list not empty
    # remove coins for transactions list from connector_unconfirmed_utxo
    # remove destroy records for transactions list from connector_unconfirmed_stxo
    # get new invalid transactions list from deleted records
    invalid_txs = set(block_invalid_txs)
    while invalid_txs:
        # delete outputs for invalid tx
        rows = await conn.fetch(
            "DELETE FROM connector_unconfirmed_utxo "
            "WHERE out_tx_id = ANY($1) "
            "RETURNING outpoint, "
            "          out_tx_id as t,"
            "          address, "
            "          amount;", invalid_txs)
        outpoints = set()
        for r in rows:
            dbs_uutxo.append(
                (r["outpoint"], r["t"], r["address"], r["amount"]))
            outpoints.add(r["outpoint"])
        # delete inputs for invalid tx using outpoint
        rows = await conn.fetch(
            "DELETE FROM connector_unconfirmed_stxo "
            "WHERE outpoint = ANY($1) "
            "RETURNING outpoint,"
            "          sequence,"
            "          out_tx_id,"
            "          tx_id,"
            "          input_index as i,"
            "          address as a, "
            "          amount as am,"
            "          pointer as pt;", outpoints)
        # collect new list of invalid tx (descendants of removed outputs)
        invalid_txs = set()
        for r in rows:
            dbs_stxo.append((r["outpoint"], r["sequence"], r["out_tx_id"],
                             r["tx_id"], r["i"], r["a"], r["am"], r["pt"]))
            invalid_txs.add(r["tx_id"])
            block_invalid_txs.add(r["tx_id"])
        # delete inputs for invalid tx using tx
        rows = await conn.fetch(
            "DELETE FROM connector_unconfirmed_stxo "
            "WHERE tx_id = ANY($1) "
            "RETURNING outpoint,"
            "          sequence,"
            "          out_tx_id,"
            "          tx_id,"
            "          input_index as i,"
            "          address as a, "
            "          amount as am,"
            "          pointer as pt;", invalid_txs)
        for r in rows:
            dbs_stxo.append((r["outpoint"], r["sequence"], r["out_tx_id"],
                             r["tx_id"], r["i"], r["a"], r["am"], r["pt"]))

    # persist an undo checkpoint for reorg handling
    await conn.execute(
        "INSERT INTO connector_block_state_checkpoint (height, data) "
        "VALUES ($1, $2);", h,
        pickle.dumps({
            "utxo": utxo,
            "uutxo": uutxo,
            "stxo": stxo,
            "p2pk_map": p2pk_map_backup,
            "coinbase_tx_id": txs[0]
        }))
    return {
        "invalid_uutxo": dbs_uutxo,
        "invalid_stxo": dbs_stxo,
        "invalid_txs": block_invalid_txs,
        "stxo": stxo,
        "utxo": uutxo,
        "tx_filters": tx_filters,
        "coinbase_tx_id": txs[0],
        "block_amount": block_amount
    }