def accumulation_hook(self, db_session):
    """Decode each digest log of the current block and persist one Log row per entry.

    For 'PreRuntime' digests, also record block-production metadata on the
    block: authority index + slot number for BABE, slot number for aura.
    """
    self.block.count_log = len(self.block.logs)

    for position, raw_log in enumerate(self.block.logs):
        digest = LogDigest(ScaleBytes(raw_log))
        digest.decode()

        entry = Log(
            block_id=self.block.id,
            log_idx=position,
            type_id=digest.index,
            type=digest.index_value,
            data=digest.value,
        )

        if entry.type == 'PreRuntime':
            value = entry.data['value']
            if value['engine'] == 'BABE':
                # Determine block producer
                self.block.authority_index = value['data']['authorityIndex']
                self.block.slot_number = value['data']['slotNumber']
            if value['engine'] == 'aura':
                self.block.slot_number = value['data']['slotNumber']

        entry.save(db_session)
def accumulation_hook(self, db_session):
    """Persist digest logs for the block; for BABE 'PreRuntime' digests,
    decode the pre-digest payload and store the producing authority's
    index on the block."""
    self.block.count_log = len(self.block.logs)

    for position, raw_log in enumerate(self.block.logs):
        digest = LogDigest(ScaleBytes(raw_log))
        digest.decode()

        if digest.index_value == "PreRuntime":
            payload = digest.value.get('value').get('data')
            if payload:
                predigest = RawBabePreDigest(ScaleBytes("0x{}".format(payload)))
                # A 34-hex-char payload with prefix 0x01 is a secondary slot claim;
                # anything else is decoded leniently as a primary claim.
                if payload[0:2] == "01" and len(payload) == 34:
                    predigest.decode()
                    self.block.account_index = predigest.value.get("Secondary").get("authorityIndex")
                else:
                    predigest.decode(check_remaining=False)
                    self.block.account_index = predigest.value.get("Primary").get("authorityIndex")
                self.block.save(db_session)

        Log(
            block_id=self.block.id,
            log_idx=position,
            type_id=digest.index,
            type=digest.index_value,
            data=digest.value,
        ).save(db_session)
def get_item(self, item_id):
    """Resolve a Log by id.

    Composite (sharded) ids have the form '<shard_num>-<block_id>-<log_idx>';
    an id without a dash is looked up directly.

    Returns:
        The first matching Log, or None when a composite id is malformed.
    """
    if '-' in item_id:
        parts = item_id.split("-")
        # BUG FIX: the original indexed parts[2] unconditionally, raising
        # IndexError for ids like 'a-b'; return None for malformed ids.
        if len(parts) != 3:
            return None
        return Log.query(self.session).filter_by(
            shard_num=int(parts[0]),
            block_id=int(parts[1]),
            log_idx=int(parts[2]),
        ).first()
    # No dash present, so the original `item_id.split('-')[0]` was a no-op.
    return Log.query(self.session).filter_by(id=item_id).first()
def log_processor(self, logs, block_id):
    """Decode the authority-set digest of a sharded block and update the
    stored Log row for log_idx 1.

    logs[0] carries the shard number at hex chars 10..12; logs[1] carries
    a 'Vec<(SessionKey, u64)>' payload following a fixed '0300...' marker.
    """
    count_log = len(logs)
    shard_num = None
    decoded_value = None
    if count_log != 0:
        for idx, log_data in enumerate(logs):
            if idx == 1:
                print('== log_data ===log_data-{}--{} '.format(log_data, idx))
                marker_pos = log_data.index('03000000000000000000')
                final = '0x' + log_data[20 + marker_pos:]
                oy = ScaleDecoder.get_decoder_class(
                    'Vec<(SessionKey, u64)>', ScaleBytes(final))
                oy.decode()
                print('== oy.value ===oy.value-{} '.format(oy.value))
                for i in range(len(oy.value)):
                    # NOTE: 'authoritiy' spelling kept — consumers of stored
                    # data depend on it (same format as accumulation_hook).
                    oy.value[i] = ("{'authoritiy': '" + oy.value[i]["col1"]
                                   + "', 'weight': "
                                   + str(oy.value[i]["col2"]) + "}")
                decoded_value = oy.value
            elif idx == 0:
                print('== log_data ===log_data-{}--{} '.format(log_data, idx))
                shard_num = log_data[10:12]
                print('== shard_num ===shard_num-{} '.format(shard_num))
        log = Log.query(self.session).filter(
            Log.block_id == block_id, Log.log_idx == 1,
            Log.shard_num == shard_num).first()
        print('== log ===dblog-{} '.format(log.block_id))
        print('== log ===true-block_id-{} '.format(block_id))
        # BUG FIX: the original read `log.data = oy.value,` — the trailing
        # comma stored a one-element tuple instead of the decoded list.
        log.data = decoded_value
        self.session.commit()
def get_relationships(self, include_list, item):
    """Build the related-resource queries for a sharded block item.

    Returns a dict keyed by inclusion name ('extrinsics', 'transactions',
    'inherents', 'relay', 'events', 'logs') containing only the entries
    requested in include_list.
    """
    session = self.session
    block_id = item.bid
    shard = item.shard_num
    relationships = {}

    if 'extrinsics' in include_list:
        relationships['extrinsics'] = Extrinsic.query(session).filter_by(
            block_id=block_id, shard_num=shard).order_by('extrinsic_idx')

    if 'transactions' in include_list:
        relationships['transactions'] = Extrinsic.query(session).filter_by(
            block_id=block_id, signed=1,
            shard_num=shard).order_by('extrinsic_idx')

    if 'inherents' in include_list:
        # Inherents exclude the 'relay' module, which gets its own key below.
        relationships['inherents'] = Extrinsic.query(session).filter(
            Extrinsic.block_id == block_id,
            Extrinsic.signed == 0,
            Extrinsic.shard_num == shard,
            Extrinsic.module_id != 'relay').order_by('extrinsic_idx')

    if 'relay' in include_list:
        relationships['relay'] = Extrinsic.query(session).filter(
            Extrinsic.block_id == block_id,
            Extrinsic.signed == 0,
            Extrinsic.shard_num == shard,
            Extrinsic.module_id == 'relay').order_by('extrinsic_idx')

    if 'events' in include_list:
        relationships['events'] = Event.query(session).filter_by(
            block_id=block_id, system=0,
            shard_num=shard).order_by('event_idx')

    if 'logs' in include_list:
        relationships['logs'] = Log.query(session).filter_by(
            block_id=block_id, shard_num=shard).order_by('log_idx')

    return relationships
def sync_block_account_id(self):
    """Backfill Block.account_index for blocks missing it by decoding each
    block's BABE 'PreRuntime' digest log.

    Payloads with prefix 0x01 and length 34 are secondary slot claims;
    prefix 0x00 payloads are primary claims. Other prefixes are skipped.

    Raises:
        ValueError: if a payload matches neither decodable form after the
            prefix pre-check (defensive; should be unreachable).
    """
    db_session = self.session
    blocks = Block.query(db_session).filter(
        Block.account_index.is_(None)).all()
    for block in blocks:
        log = Log.query(db_session).filter(Log.block_id == block.id).filter(
            Log.type == 'PreRuntime').first()
        if log:
            data = log.data.get("value").get("data")
            if data:
                # Only primary (0x00) and secondary (0x01) claims are decodable.
                if data[0:2] != "01" and data[0:2] != "00":
                    continue
                res = RawBabePreDigest(ScaleBytes("0x{}".format(data)))
                if data[0:2] == "01" and len(data) == 34:
                    res.decode()
                    block.account_index = res.value.get("Secondary").get(
                        "authorityIndex")
                elif data[0:2] == "00":
                    res.decode(check_remaining=False)
                    block.account_index = res.value.get("Primary").get(
                        "authorityIndex")
                else:
                    # BUG FIX: the original `raise "error log data ".format(data)`
                    # raised a str (TypeError in Python 3) and its format string
                    # had no placeholder, so `data` was never interpolated.
                    raise ValueError("error log data {}".format(data))
                block.save(db_session)
                print("...................", block.id, block.account_index)
        else:
            print("...................", "Blocks not found")
    db_session.commit()
def accumulation_hook(self, db_session):
    """Decode every digest log of the current block and store one Log row
    per entry."""
    raw_logs = self.block.logs
    self.block.count_log = len(raw_logs)

    for position, raw in enumerate(raw_logs):
        digest = LogDigest(ScaleBytes(raw))
        digest.decode()
        Log(
            block_id=self.block.id,
            log_idx=position,
            type_id=digest.index,
            type=digest.index_value,
            data=digest.value,
        ).save(db_session)
def accumulation_hook(self, db_session):
    """Decode digest logs; for 'PreRuntime' digests, further decode the
    BABE/aura pre-digest payload, replace the raw hex in the stored log
    data with the decoded structure, and record producer metadata on the
    block (authority index / slot number)."""
    self.block.count_log = len(self.block.logs)

    for position, raw_log in enumerate(self.block.logs):
        digest = LogDigest(ScaleBytes(raw_log))
        digest.decode()

        entry = Log(
            block_id=self.block.id,
            log_idx=position,
            type_id=digest.index,
            type=digest.index_value,
            data=digest.value,
        )

        if entry.type == 'PreRuntime':
            engine = entry.data['value']['engine']

            if engine == 'BABE':
                # Determine block producer
                raw_hex = entry.data['value']['data'].replace('0x', '')
                babe_cls = RuntimeConfiguration().get_decoder_class(
                    'RawBabePreDigest')
                babe_predigest = babe_cls(
                    ScaleBytes(bytearray.fromhex(raw_hex))).decode()
                inner_values = list(babe_predigest.values())
                if len(inner_values) > 0:
                    # The predigest is a single-variant mapping; unwrap it.
                    entry.data['value']['data'] = inner_values[0]
                    self.block.authority_index = \
                        entry.data['value']['data']['authorityIndex']
                    self.block.slot_number = \
                        entry.data['value']['data']['slotNumber']

            if engine == 'aura':
                raw_hex = entry.data['value']['data'].replace('0x', '')
                aura_cls = RuntimeConfiguration().get_decoder_class(
                    'RawAuraPreDigest')
                aura_predigest = aura_cls(
                    ScaleBytes(bytearray.fromhex(raw_hex))).decode()
                entry.data['value']['data'] = aura_predigest
                self.block.slot_number = aura_predigest['slotNumber']

        entry.save(db_session)
def get_relationships(self, include_list, item):
    """Build related-resource queries (extrinsics, transactions, inherents,
    events, logs) for a block item, keyed by inclusion name."""
    session = self.session
    relationships = {}

    if 'extrinsics' in include_list:
        relationships['extrinsics'] = (
            Extrinsic.query(session)
            .filter_by(block_id=item.id)
            .order_by('extrinsic_idx'))

    if 'transactions' in include_list:
        # Transactions are the signed extrinsics.
        relationships['transactions'] = (
            Extrinsic.query(session)
            .filter_by(block_id=item.id, signed=1)
            .order_by('extrinsic_idx'))

    if 'inherents' in include_list:
        # Inherents are the unsigned extrinsics.
        relationships['inherents'] = (
            Extrinsic.query(session)
            .filter_by(block_id=item.id, signed=0)
            .order_by('extrinsic_idx'))

    if 'events' in include_list:
        relationships['events'] = (
            Event.query(session)
            .filter_by(block_id=item.id, system=0)
            .order_by('event_idx'))

    if 'logs' in include_list:
        relationships['logs'] = (
            Log.query(session)
            .filter_by(block_id=item.id)
            .order_by('log_idx'))

    return relationships
def accumulation_hook(self, db_session):
    """Store one Log row per digest of the current (sharded) block.

    The digest at index 1 is an authority-set payload: everything after a
    fixed '0300...' marker decodes as 'Vec<(SessionKey, u64)>' and is stored
    as formatted strings. All other digests are decoded as plain LogDigest.
    """
    self.block.count_log = len(self.block.logs)
    if self.block.count_log != 0:
        for position, raw in enumerate(self.block.logs):
            if position == 1:
                marker = raw.index('03000000000000000000')
                payload = '0x' + raw[20 + marker:]
                decoder = ScaleDecoder.get_decoder_class(
                    'Vec<(SessionKey, u64)>', ScaleBytes(payload))
                decoder.decode()
                for i in range(len(decoder.value)):
                    # NOTE: 'authoritiy' spelling kept — stored data format
                    # matches the rest of the project.
                    decoder.value[i] = ("{'authoritiy': '"
                                        + decoder.value[i]["col1"]
                                        + "', 'weight': "
                                        + str(decoder.value[i]["col2"]) + "}")
                entry = Log(
                    block_id=self.block.bid,
                    log_idx=1,
                    type_id=0,
                    type='Other',
                    data=decoder.value,
                    shard_num=self.block.shard_num,
                )
            else:
                digest = LogDigest(ScaleBytes(raw))
                digest.decode()
                entry = Log(
                    block_id=self.block.bid,
                    log_idx=position,
                    type_id=digest.index,
                    type=digest.index_value,
                    data=digest.value,
                    shard_num=self.block.shard_num,
                )
            entry.save(db_session)
def get_item(self, item_id):
    """Fetch a Log by its dash-separated composite id.

    Returns None unless the id splits into exactly two parts.
    """
    key_parts = item_id.split('-')
    if len(key_parts) != 2:
        return None
    return Log.query(self.session).get(key_parts)
def get_query(self):
    """Return the base Log query, newest block first."""
    base_query = Log.query(self.session)
    return base_query.order_by(Log.block_id.desc())
def accumulation_revert(self, db_session):
    """Delete every Log row previously stored for the current block."""
    stale_logs = Log.query(db_session).filter_by(block_id=self.block.id)
    for stale in stale_logs:
        db_session.delete(stale)
def accumulation_revert(self, db_session):
    """Delete the block's stored logs and clear the producer metadata that
    was derived from them."""
    stale_logs = Log.query(db_session).filter_by(block_id=self.block.id)
    for stale in stale_logs:
        db_session.delete(stale)
    # Reset fields populated by accumulation_hook from PreRuntime digests.
    self.block.authority_index = None
    self.block.slot_number = None
def get_item(self, item_id):
    """Look up a Log by its dash-separated composite primary key."""
    key_parts = item_id.split('-')
    return Log.query(self.session).get(key_parts)
def on_post(self, req, resp):
    """Falcon POST handler returning full block details (block info,
    extrinsics, digest logs, events) for a given 'block_id' or 'block_hash'.

    Responds 400 when neither identifier is supplied and 404 when no block
    matches the resolved hash.
    """
    blockHash = None
    if req.media.get('block_id'):
        # Resolve a block number to its hash via the substrate RPC node.
        substrate = SubstrateInterface(url=SUBSTRATE_RPC_URL,
                                       address_type=SUBSTRATE_ADDRESS_TYPE,
                                       type_registry_preset=TYPE_REGISTRY)
        blockHash = substrate.get_block_hash(req.media.get('block_id'))
    elif req.media.get('block_hash'):
        blockHash = req.media.get('block_hash')
    else:
        resp.status = falcon.HTTP_BAD_REQUEST
        resp.media = {
            'errors': ['Either blockHash or block_id should be supplied']
        }

    if blockHash:
        block = Block.query(
            self.session).filter(Block.hash == blockHash).first()

        if block:
            # BUG FIX: the original set HTTP 200 and queried BlockTotal with
            # block.id BEFORE checking `block` for None, so an unknown hash
            # raised AttributeError instead of returning the 404 below.
            resp.status = falcon.HTTP_200

            blockTotal = BlockTotal.query(
                self.session).filter(BlockTotal.id == block.id).first()
            author = ss58_encode(
                blockTotal.author.replace('0x', '')
            ) if blockTotal is not None and blockTotal.author is not None else None

            blockInfo = {}
            blockInfo["timestamp"] = block.datetime.strftime(
                "%Y-%m-%d %H:%M:%S")
            blockInfo["block_hash"] = block.hash
            blockInfo["block_id"] = block.id
            blockInfo["parent_id"] = block.id - 1 if block.id > 0 else 0
            blockInfo["child_id"] = block.id + 1
            blockInfo["parent_hash"] = block.parent_hash
            blockInfo["state_root"] = block.state_root
            blockInfo["extrinsic_root"] = block.extrinsics_root
            blockInfo["validator"] = author
            blockInfo["count_extrinsic"] = block.count_extrinsics
            blockInfo["count_event"] = block.count_events
            blockInfo["count_log"] = block.count_log
            blockInfo["age"] = block.datetime.strftime("%Y-%m-%d %H:%M:%S")

            # Fetch the extrinsics belonging to this block.
            extrinsics = Extrinsic.query(
                self.session).filter(Extrinsic.block_id == block.id).all()
            extrinsicsObj = [
                {
                    "extrinsic_id": '{}-{}'.format(block.id,
                                                   extrinsic.extrinsic_idx),
                    "hash": extrinsic.extrinsic_hash
                    if extrinsic.extrinsic_hash else None,
                    "age": block.datetime.strftime("%Y-%m-%d %H:%M:%S"),
                    "result": extrinsic.success,
                    "fee": None,
                    "operation": '{}({})'.format(extrinsic.module_id,
                                                 extrinsic.call_id),
                    "params": extrinsic.params
                } for extrinsic in extrinsics
            ]

            # Fetch the digest logs belonging to this block.
            logs = Log.query(
                self.session).filter(Log.block_id == block.id).all()
            logsObj = [{
                "log_id": '{}-{}'.format(block.id, log.log_idx),
                "block_id": block.id,
                "type": log.type,
                "data": log.data['value']
            } for log in logs]

            # Fetch the events belonging to this block.
            events = Event.query(
                self.session).filter(Event.block_id == block.id).all()
            eventObj = [{
                "id": '{}-{}'.format(block.id, event.event_idx),
                "block_id": block.id,
                "block_hash": block.hash,
                "module_id": event.module_id,
                "event_id": event.event_id,
                "attributes": event.attributes,
                "operation": '{}({})'.format(event.module_id, event.event_id),
                "desc": self.getEventDesc(event.module_id, event.event_id),
                "hash": self.getEventHash(block.id, event.extrinsic_idx)
            } for event in events]

            resp.media = {
                'status': 'success',
                'data': {
                    "block_info": blockInfo,
                    "extrinsics": extrinsicsObj,
                    "logs": logsObj,
                    "events": eventObj
                }
            }
        else:
            resp.status = falcon.HTTP_404
            resp.media = {'result': 'Block not found'}