def block(db):
    """Insert the hard-coded genesis block (height 0) into the database."""
    genesis = dict(
        height=0,
        hash='cf7938a048f1442dd34f87ce56d3e25455b22a44f676325f1ae8c7a33d0731c7',
        timestamp='2018-06-17 00:50:36',
        merkle_root='3ead103523ad8f9bfc8365c7b5ddb6f10c731c6274730e88bcaa2c74606dd4bb',
        tx=["3ead103523ad8f9bfc8365c7b5ddb6f10c731c6274730e88bcaa2c74606dd4bb"],
        difficulty=0.000244141,
        size=286,
        nonce=2085541870,
        version=b'01000000',
        bits=b'f0ff0f1e',
        coinbase=b'04ffff001d0104464e6f727468204b6f7265616e2066696c6d206f6e204b696d27732053696e6761706f726520747269702072657665616c73206e657720666f637573206f6e2065636f6e6f6d79',
        tx_count=1,
    )
    Block.create(**genesis)
def read_status(q=None):
    """Status endpoint dispatcher.

    Supported queries: 'getInfo', 'getBestBlockHash', 'getDifficulty',
    'getLastBlockHash'. Returns a dict for a known query, None otherwise.
    """
    if q == 'getInfo':
        # Fix: match the other branches and ignore orphaned blocks when
        # selecting the chain tip (previously used the bare highest row).
        latest_block = Block.select().where(Block.orphaned == False).order_by(Block.height.desc()).get()
        # Mempool txs are those not yet attached to any block.
        mempool_txs = Transaction.select().where(Transaction.block == None).count()
        return {
            'blocks': latest_block.height,
            'lastblockhash': latest_block.hash,
            'difficulty': latest_block.difficulty,
            'mempool_txs': mempool_txs,
        }
    elif q == 'getBestBlockHash':
        latest_block = Block.select(Block.hash).where(Block.orphaned == False).order_by(Block.height.desc()).get()
        return {
            'bestblockhash': latest_block.hash
        }
    elif q == 'getDifficulty':
        latest_block = Block.select(Block.difficulty).where(Block.orphaned == False).order_by(Block.height.desc()).get()
        return {
            'difficulty': latest_block.difficulty
        }
    elif q == 'getLastBlockHash':
        latest_block = Block.select(Block.hash).where(Block.orphaned == False).order_by(Block.height.desc()).get()
        # Both keys intentionally carry the same hash for API compatibility.
        return {
            'syncTipHash': latest_block.hash,
            'lastblockhash': latest_block.hash,
        }
def checkblocks(self, height, force=False):
    """Flush LevelDB-cached rows into Postgres.

    Commits 'pg_block:*' entries as Block rows and drains the
    'walletCreate:*' / 'walletAdd:*' / 'walletMerge:*' queues into the
    wallet-grouping tables. Runs every 300 blocks, or always when
    `force` is True.
    """
    if force or height % 300 == 0:
        self.log.debug('Commit blocks')
        blocks = []
        hashes = []
        with self.db.write_batch(transaction=True) as deleteBatch:
            for key, value in self.db.iterator(prefix=b'pg_block:'):
                data = json.loads(value.decode('utf-8'))
                hashes.append(data['hash'])
                # Convert JSON-safe cached scalars back to the binary/native
                # forms the Block model stores (big-endian packed ints,
                # base64-decoded coinbase, datetime, bigint chainwork).
                data['version'] = struct.pack('>i', data['version'])
                data['bits'] = struct.pack('>i', data['bits'])
                data['coinbase'] = base64.decodebytes(data['coinbase'].encode())
                data['timestamp'] = datetime.fromtimestamp(data['timestamp'])
                data['chainwork'] = int_to_bytes(data['chainwork'])
                blocks.append(data)
                deleteBatch.delete(key)  # consumed from the cache
        if blocks:
            if not self.initial_sync:
                # Live mode: insert one at a time so each new block can be
                # broadcast to websocket subscribers in the 'inv' room.
                for block in blocks:
                    b = Block.create(**block)
                    external_sio.emit('block', b.to_json(), room='inv')
            else:
                # Bulk path during initial sync — no notifications needed.
                Block.insert_many(blocks).execute(None)
        # Wallet grouping queues: iterate over a snapshot so entries written
        # concurrently are not picked up mid-drain, deleting as we go.
        with self.db.write_batch(transaction=True) as deleteBatch:
            with self.db.snapshot() as sn:
                wallets = []
                walletsAddress = []
                for key, value in sn.iterator(prefix=b'walletCreate:'):
                    # key format: b'walletCreate:<address>', value: wallet uid
                    addr = key.decode().split(':')[1]
                    uid = value.decode()
                    wallets.append({
                        'uid': uid
                    })
                    walletsAddress.append({
                        'wallet': uid,
                        'address': addr,
                    })
                    deleteBatch.delete(key)
                    self.log.info('PG wallet create %s for %s' % (uid, addr))
                if wallets:
                    # Groups must exist before their address links are inserted.
                    WalletGroup.insert_many(wallets).on_conflict_ignore().execute(None)
                for key, value in sn.iterator(prefix=b'walletAdd:'):
                    # Attach an additional address to an existing wallet group.
                    addr = key.decode().split(':')[1]
                    walletsAddress.append({
                        'wallet': value.decode(),
                        'address': addr,
                    })
                    self.log.info('PG wallet add %s for %s' % (value.decode(), addr))
                    deleteBatch.delete(key)
                if walletsAddress:
                    WalletGroupAddress.insert_many(walletsAddress).execute(None)
                for key, value in sn.iterator(prefix=b'walletMerge:'):
                    # Re-point every address of the old group at the new one.
                    existingWalletId = key.decode().split(':')[1]
                    newWalletId = value.decode()
                    deleteBatch.delete(key)
                    res = WalletGroupAddress.update(wallet = newWalletId).where(WalletGroupAddress.wallet == existingWalletId).execute(None)
                    self.log.info('PG wallet merge from %s to %s (%s)' % (existingWalletId, newWalletId, res))
def read_blockhash(blockhash):
    """Return a JSON-ready dict describing the block with hash `blockhash`.

    Responds 404 when the hash is unknown. The result includes the previous
    and next canonical block hashes when available, the block's transactions
    (coinbase first) and the mining pool guessed from the coinbase script.
    """
    prev = None
    nxt = None
    try:
        b = Block.get(Block.hash == blockhash)
        if b.height > 0:
            # Fix: exclude orphaned rows so we link to the canonical parent;
            # the next-block lookup below already applied this filter.
            prev = Block.get(Block.height == b.height - 1, Block.orphaned == False)
    except Block.DoesNotExist:
        return HTMLResponse(status_code=404)
    try:
        nxt = Block.get(Block.height == b.height + 1, Block.orphaned == False)
    except Block.DoesNotExist:
        # No successor yet (chain tip) — simply omit 'nextblockhash'.
        pass
    txs = list(Transaction.select().where(Transaction.block == b.hash).execute())
    txs = [{
        'txid': tx.txid,
        'timestamp': int(tx.timestamp.timestamp()),
        'addresses_in': tx.addresses_in,
        'addresses_out': tx.addresses_out
    } for tx in txs]

    # Coinbase transactions (marked by a 'null' input address) sort first.
    txs.sort(key=lambda t: 1 if 'null' in t['addresses_in'] else 0, reverse=True)

    # Identify the mining pool by scanning the coinbase script for known tags.
    pool = None
    cb = bytes(b.coinbase)
    for tag, pool_name in POOLS.items():
        if cb.find(tag.encode()) != -1:
            pool = pool_name
    res = {
        'height': b.height,
        'hash': b.hash,
        'timestamp': int(b.timestamp.timestamp()),
        'merkleroot': b.merkle_root,
        'txs': txs,
        'difficulty': b.difficulty,
        'size': b.size,
        'version_hex': bytes(b.version).hex(),
        'version': struct.unpack('i', bytes(b.version))[0],
        'bits': bytes(b.bits).hex(),
        'nonce': b.nonce,
        'pool': pool,
    }
    if prev:
        res['previousblockhash'] = prev.hash
    if nxt:
        res['nextblockhash'] = nxt.hash
    return res
def read_blocks(beforeBlock=None, limit : int = 100):
    """List recent blocks, newest first, optionally below a given height.

    `limit` is capped at 100.
    """
    query = Block.select()
    if beforeBlock:
        query = query.where(Block.height < beforeBlock)
    capped = min(limit, 100)
    out = []
    for blk in query.order_by(Block.timestamp.desc()).limit(capped):
        # Guess the mining pool from known tags in the coinbase script.
        coinbase_bytes = bytes(blk.coinbase)
        pool = None
        for tag, pool_name in POOLS.items():
            if tag.encode() in coinbase_bytes:
                pool = pool_name
        out.append({
            'height': blk.height,
            'hash': blk.hash,
            'timestamp': int(blk.timestamp.timestamp()),
            'merkle_root': blk.merkle_root,
            'tx': blk.tx,
            'difficulty': blk.difficulty,
            'size': blk.size,
            'version_hex': bytes(blk.version).hex(),
            'version': struct.unpack('i', bytes(blk.version))[0],
            'bits': bytes(blk.bits).hex(),
            'nonce': blk.nonce,
            'pool': pool
        })
    return out
def read_block_txs(block : str):
    """Return the transactions of the block identified by hash `block` (404 if unknown)."""
    try:
        blk = Block.get(Block.hash == block)
    except Block.DoesNotExist:
        return HTMLResponse(status_code=404)
    # Tip is needed to compute confirmation counts for every tx.
    tip = get_latest_block()
    entries = []
    for tx in Transaction.select().where(Transaction.txid.in_(blk.tx)):
        # A coinbase transaction has no inputs.
        coinbase = len(tx.vin) == 0
        entries.append({
            'blockhash': blk.hash,
            'blockheight': blk.height,
            'blocktime': int(blk.timestamp.timestamp()),
            'confirmations': get_confirmations(blk.height, block=tip),
            'isCoinBase': coinbase,
            'txid': tx.txid,
            'valueOut': tx.output_value,
            'vin': tx.vin,
            'vout': tx.vout,
        })
    return { 'txs': entries }
def read_status(q=None):
    """Return a chain-tip summary for q == 'getInfo'; None for any other query."""
    if q != 'getInfo':
        return None
    tip = Block.select().order_by(Block.height.desc()).get()
    # Mempool size = transactions not yet attached to a block.
    pending = Transaction.select().where(Transaction.block == None).count()
    return {
        'blocks': tip.height,
        'lastblockhash': tip.hash,
        'difficulty': tip.difficulty,
        'mempool_txs': pending,
    }
def __init__(self, log, mempool, params):
    """Chain database manager: LevelDB cache + raw block stream + Postgres.

    Args:
        log: logger used for all status/error output.
        mempool: in-memory transaction pool fed/consumed by this db.
        params: network parameters (must expose NETMAGIC and GENESIS_BLOCK).

    Raises:
        RuntimeError: when the stored network magic does not match `params`,
            indicating corruption or a database from a different network.
    """
    self.log = log
    self.mempool = mempool
    self.params = params
    self.utxo_changes = 0
    self.cache = Cache()
    self.cache.clear()
    ## level DB
    # pg_block: block data to insert into PG database
    # pg_tx: transaction data to insert into PG database
    # tx:* transaction outputs
    # misc:* state
    # height:* list of blocks at height h
    # blkmeta:* block metadata
    # blocks:* block seek point in stream
    datadir = '/data/explorer/blocks/'
    self.db = self.cache.db
    # Append-only raw block stream plus an independent reader handle;
    # blocks:* keys index seek offsets into this file.
    self.blk_write = io.BufferedWriter(io.FileIO(datadir + '/blocks.dat','ab'))
    self.blk_read = io.BufferedReader(io.FileIO(datadir + '/blocks.dat','rb'))
    if self.db.get(b'misc:height') is None:
        # Fresh database: seed the chain-state keys (height -1 = no blocks).
        self.log.info('INITIALIZING EMPTY BLOCKCHAIN DATABASE')
        with self.db.write_batch(transaction=True) as batch:
            batch.put(b'misc:height', struct.pack('i', -1))
            batch.put(b'misc:msg_start', self.params.NETMAGIC)
            batch.put(b'misc:tophash', ser_uint256(0))
            batch.put(b'misc:total_work', b'0x0')
    # Guard against opening a database created for a different network.
    start = self.db.get(b'misc:msg_start')
    if start != self.params.NETMAGIC:
        self.log.error("Database magic number mismatch. Data corruption or incorrect network?")
        raise RuntimeError
    self.block_lock = BoundedSemaphore()
    self.address_changes = {}       # pending per-address balance deltas
    self.address_change_count = 0
    self.transaction_change_count = 0
    self.utxo_cache = {}
    self.tx_lock = False
    self.initial_sync = True        # bulk-insert mode until tip is reached
    self.wallet_group = WalletGrouper('/data/explorer/wallets')
    # Force-flush any rows left cached by a previous (crashed) run.
    self.checktransactions(True)
    self.checkaddresses(True)
    self.checkblocks(0, True)
    self.checkutxos(True)
    self.orphans = {}
    self.orphan_deps = {}
    if Block.select().count(None) == 0:
        self.log.info('Initialising genesis block')
        self.putblock(self.params.GENESIS_BLOCK)
def checkblocks(self, height, force=False):
    """Flush LevelDB-cached block rows ('pg_block:*') into Postgres.

    Runs every 300 blocks, or always when `force` is True. Cached entries
    are deleted from LevelDB in the same write batch they are drained in.
    """
    # Guard clause: nothing to do off-schedule unless forced.
    if not (force or height % 300 == 0):
        return
    self.log.debug('Commit blocks')
    blocks = []
    with self.db.write_batch(transaction=True) as deleteBatch:
        for key, value in self.db.iterator(prefix=b'pg_block:'):
            data = json.loads(value.decode('utf-8'))
            # Convert JSON-safe cached scalars back to the binary/native
            # forms the Block model stores.
            data['version'] = struct.pack('i', data['version'])
            data['bits'] = struct.pack('i', data['bits'])
            data['coinbase'] = base64.decodebytes(data['coinbase'].encode())
            data['timestamp'] = datetime.fromtimestamp(data['timestamp'])
            blocks.append(data)
            deleteBatch.delete(key)  # consumed from the cache
    if blocks:
        if not self.initial_sync:
            # Live mode: create rows one-by-one so each new block can be
            # broadcast to websocket subscribers in the 'inv' room.
            for block in blocks:
                b = Block.create(**block)
                external_sio.emit('block', b.to_json(), room='inv')
        else:
            # Bulk path during initial sync — no notifications needed.
            Block.insert_many(blocks).execute(None)
def disconnect_block(self, block):
    """Disconnect `block` from the best chain, rewinding chain state.

    Marks its spent outputs unspent, returns its non-coinbase txs to the
    mempool, rewinds the misc:* tip pointers to the parent block, flags
    the block orphaned in both the LevelDB cache and Postgres, and
    reverses the per-address balance deltas it produced.

    Returns:
        True on success, False when the block's outputs cannot be resolved.
    """
    ser_prevhash = b2lx(block.hashPrevBlock)
    prevmeta = BlkMeta()
    prevmeta.deserialize(self.db.get(('blkmeta:'+ser_prevhash).encode()))
    tup = self.unique_outputs(block)
    if tup is None:
        return False
    outputs = tup[0]

    # mark deps as unspent
    with self.db.write_batch(transaction=True) as batch:
        for output in outputs:
            self.clear_txout(output[0], output[1], batch)

        # update tx index and memory pool
        for tx in block.vtx:
            ser_hash = b2lx(tx.GetTxid())
            batch.delete(('tx:'+ser_hash).encode())
            if not tx.is_coinbase():
                self.mempool_add(tx)

        # update database pointers for best chain
        batch.put(b'misc:total_work', int_to_bytes(prevmeta.work))
        batch.put(b'misc:height', struct.pack('i', prevmeta.height))
        batch.put(b'misc:tophash', lx(ser_prevhash))

        # TODO:
        # [x] Search block cache for block and marked as orphaned
        # [x] Search transaction cache and delete
        #
        # [x] Mark block as orphaned
        # [x] Remove transactions from transaction table, mempool add done above
        # [x] Revert balance changes
        #
        # Disconnects happen infrequently so we can do these updates to postgres DB immediately
        bhash = b2lx(block.GetHash())
        key = ('pg_block:%s' % bhash).encode()
        cached_block = self.db.get(key)
        if cached_block:
            # Block is still queued for PG insertion: flag it orphaned in place
            # so the eventual insert carries the orphaned marker.
            cached_block = json.loads(cached_block.decode('utf-8'))
            cached_block['orphaned'] = True
            batch.put(key, (json.dumps(cached_block)).encode())
        for tx in block.vtx:
            if tx.is_coinbase():
                continue
            ser_hash = b2lx(tx.GetTxid())
            key = ('pg_tx:%s' % ser_hash)
            # NOTE(review): this key is a str while every other batch op uses
            # bytes keys — confirm the LevelDB wrapper accepts str here,
            # otherwise this delete is a latent bug.
            batch.delete(key)

    # Postgres side: mark the block orphaned and drop its transactions
    # (non-coinbase txs were already returned to the mempool above).
    Block.update(orphaned=True).where(Block.hash == b2lx(block.GetHash())).execute()
    Transaction.delete().where(Transaction.block == b2lx(block.GetHash())).execute()

    # Reverse the per-address balance deltas this block applied on connect:
    # spends are credited back, outputs are debited.
    for tx in block.vtx:
        txid = b2lx(tx.GetTxid())
        for idx, vin in enumerate(tx.vin):
            if tx.is_coinbase() and idx == 0:
                continue
            preaddress, prevalue = self.getutxo(b2lx(vin.prevout.hash), vin.prevout.n)
            if preaddress in self.address_changes:
                self.address_changes[preaddress]['balance'] += prevalue
                self.address_changes[preaddress]['sent'] -= prevalue
            else:
                self.address_changes[preaddress] = {
                    'balance': prevalue,
                    'sent': -prevalue, # subtract from sent
                    'received': 0,
                }
        for idx, vout in enumerate(tx.vout):
            address = self._address_from_vout(txid, vout)
            value = vout.nValue
            if address in self.address_changes:
                self.address_changes[address]['balance'] -= value
                self.address_changes[address]['received'] -= value
            else:
                self.address_changes[address] = {
                    'balance': -value,
                    'received': -value,
                    'sent': 0
                }
    self._update_address_index()
    # Push the reverted balances to Postgres immediately.
    self.checkaddresses(force=True)
    self.log.info("ChainDb(disconn): height %d, block %s" % (prevmeta.height, b2lx(block.hashPrevBlock)))
    return True
def get_latest_block():
    """Return the highest-height block row currently in the database."""
    tip_query = Block.select().order_by(Block.height.desc()).limit(1)
    return tip_query[0]