def update_account_balances(self):
    """Sync the Account balance columns from each account's most recent snapshot.

    The inner query selects, per account, the highest block that has a
    snapshot row; the join then yields exactly one (latest) row per account.
    """
    # set balances according to most recent snapshot
    account_info = self.db_session.execute(""" select a.account_id, a.balance_total, a.balance_free, a.balance_reserved, a.nonce from data_account_info_snapshot as a inner join ( select account_id, max(block_id) as max_block_id from data_account_info_snapshot group by account_id ) as b on a.account_id = b.account_id and a.block_id = b.max_block_id """)
    for account_id, balance_total, balance_free, balance_reserved, nonce in account_info:
        # Bulk UPDATE per account; 'fetch' keeps any loaded Account objects
        # in this session consistent with the database.
        Account.query(self.db_session).filter_by(id=account_id).update(
            {
                Account.balance_total: balance_total,
                Account.balance_free: balance_free,
                Account.balance_reserved: balance_reserved,
                Account.nonce: nonce,
            }, synchronize_session='fetch'
        )
def get_item(self, item_id):
    """Resolve an Account by hex id ('0x…'), DID ('did…') or plain address.

    :param item_id: hex-prefixed account id, a DID string, or an address.
    :return: the matching Account, or None when nothing matches (including
             when the DID itself is unknown).
    """
    if item_id[0:2] == '0x':
        # Hex-prefixed account id: strip the prefix and look up by primary key.
        return Account.query(self.session).filter_by(id=item_id[2:]).first()
    elif item_id[0:3] == 'did':
        # DID: resolve to an address first, then look up the account.
        did = Did.query(self.session).filter_by(did=item_id).first()
        if did is None:
            # Fix: previously dereferenced did.address unconditionally,
            # raising AttributeError for unknown DIDs.
            return None
        return Account.query(self.session).filter_by(address=did.address).first()
    else:
        # Anything else is treated as a plain address.
        return Account.query(self.session).filter_by(address=item_id).first()
def rebuild_account_info_snapshot(self):
    """Truncate and rebuild the account-info snapshot table, then sync balances.

    Replays balance snapshots for every (account, block) pair recorded in the
    search index: a full snapshot of all accounts is taken whenever a block
    crosses the configured interval boundary, with incremental per-account
    snapshots in between. Finally copies the latest snapshot values onto the
    Account rows.

    :return: dict with a human-readable result message.
    """
    harvester = PolkascanHarvesterService(
        db_session=self.session,
        type_registry=TYPE_REGISTRY,
        type_registry_file=TYPE_REGISTRY_FILE)
    last_full_snapshot_block_nr = 0
    # Start from scratch: drop all existing snapshot rows.
    self.session.execute('truncate table {}'.format(AccountInfoSnapshot.__tablename__))
    for account_id, block_id in self.session.query(
            SearchIndex.account_id, SearchIndex.block_id
    ).filter(
            SearchIndex.block_id >= settings.BALANCE_SYSTEM_ACCOUNT_MIN_BLOCK
    ).order_by('block_id').group_by(
            SearchIndex.account_id, SearchIndex.block_id).yield_per(1000):
        if block_id > last_full_snapshot_block_nr + settings.BALANCE_FULL_SNAPSHOT_INTERVAL:
            # Crossed a full-snapshot boundary: snapshot every account at the
            # nearest lower multiple of the interval.
            last_full_snapshot_block_nr = block_id - block_id % settings.BALANCE_FULL_SNAPSHOT_INTERVAL
            # NOTE: 'snaphot' typo is the harvester's actual method name.
            harvester.create_full_balance_snaphot(last_full_snapshot_block_nr)
            self.session.commit()
        else:
            # Incremental snapshot for just this account at this block.
            harvester.create_balance_snapshot(block_id, account_id)
            self.session.commit()
    # set balances according to most recent snapshot
    account_info = self.session.execute(""" select a.account_id, a.balance_total, a.balance_free, a.balance_reserved, a.nonce from data_account_info_snapshot as a inner join ( select account_id, max(block_id) as max_block_id from data_account_info_snapshot group by account_id ) as b on a.account_id = b.account_id and a.block_id = b.max_block_id """)
    for account_id, balance_total, balance_free, balance_reserved, nonce in account_info:
        Account.query(self.session).filter_by(id=account_id).update(
            {
                Account.balance_total: balance_total,
                Account.balance_free: balance_free,
                Account.balance_reserved: balance_reserved,
                Account.nonce: nonce,
            }, synchronize_session='fetch')
    self.session.commit()
    return {'result': 'account info snapshots rebuilt'}
def serialize_item(self, item):
    """Serialize a balance-transfer event into a JSON:API-style dict.

    Expected ``item.attributes`` layout: [0] sender account, [1] destination
    account, [2] transferred value, [3] fee (optional — some networks don't
    have fees).
    """
    def account_data(attribute):
        # Resolve an account attribute to its serialized form, falling back
        # to a minimal stub (id + SS58 address) when the account is unknown.
        # Extracted helper: sender/destination handling was duplicated verbatim.
        account_id = attribute['value'].replace('0x', '')
        account = Account.query(self.session).get(account_id)
        if account:
            return account.serialize()
        return {
            'type': 'account',
            'id': account_id,
            'attributes': {
                'id': account_id,
                'address': ss58_encode(account_id, settings.SUBSTRATE_ADDRESS_TYPE)
            }
        }

    sender_data = account_data(item.attributes[0])
    destination_data = account_data(item.attributes[1])

    # Some networks don't have fees
    if len(item.attributes) == 4:
        fee = item.attributes[3]['value']
    else:
        fee = 0

    return {
        'type': 'balancetransfer',
        'id': '{}-{}'.format(item.block_id, item.event_idx),
        'attributes': {
            'block_id': item.block_id,
            'event_idx': '{}-{}'.format(item.block_id, item.event_idx),
            'sender': sender_data,
            'destination': destination_data,
            'value': item.attributes[2]['value'],
            'fee': fee
        }
    }
def sequencing_hook(self, db_session, parent_block_data, parent_sequenced_block_data):
    """Apply this block's identity audit records to their accounts, in event order."""
    for identity_audit in IdentityAudit.query(db_session).filter_by(
            block_id=self.block.id).order_by('event_idx'):
        account = Account.query(db_session).get(identity_audit.account_id)
        if account:
            if identity_audit.type_id == IDENTITY_TYPE_SET:
                # Identity set/updated: copy every identity field from the audit payload.
                account.identity_display = identity_audit.data.get('display')
                account.identity_email = identity_audit.data.get('email')
                account.identity_legal = identity_audit.data.get('legal')
                account.identity_riot = identity_audit.data.get('riot')
                account.identity_web = identity_audit.data.get('web')
                account.identity_twitter = identity_audit.data.get('twitter')
                account.save(db_session)
            elif identity_audit.type_id in [IDENTITY_TYPE_CLEARED, IDENTITY_TYPE_KILLED]:
                # Identity cleared or killed: wipe all identity fields.
                account.identity_display = None
                account.identity_email = None
                account.identity_legal = None
                account.identity_riot = None
                account.identity_web = None
                account.identity_twitter = None
                account.save(db_session)
def balance_snapshot(self, account_id=None, block_start=1, block_end=None, block_ids=None):
    """Create balance snapshots for one or all accounts over a range of blocks.

    :param account_id: snapshot only this account; all accounts when None.
    :param block_start: first block of the range (inclusive), default 1.
    :param block_end: last block of the range (inclusive); when None the
                      current finalised chain tip is used.
    :param block_ids: explicit list of block ids; overrides the range.
    :return: summary dict echoing the effective parameters.
    """
    if account_id:
        accounts = [account_id]
    else:
        accounts = [account.id for account in Account.query(self.session)]

    harvester = PolkascanHarvesterService(
        db_session=self.session,
        type_registry=TYPE_REGISTRY,
        type_registry_file=TYPE_REGISTRY_FILE
    )

    if block_ids:
        block_range = block_ids
    else:
        if block_end is None:
            # Set block end to chaintip
            substrate = SubstrateInterface(url=SUBSTRATE_RPC_URL, runtime_config=RuntimeConfiguration())
            block_end = substrate.get_block_number(substrate.get_chain_finalised_head())
        block_range = range(block_start, block_end + 1)

    for block_id in block_range:
        for account in accounts:
            harvester.create_balance_snapshot(block_id, account)
            # Commit per snapshot so partial progress survives interruption.
            self.session.commit()

    return {
        # Fix: message previously read 'Snapshop created' (typo).
        'message': 'Snapshot created',
        'account_id': account_id,
        'block_start': block_start,
        'block_end': block_end,
        'block_ids': block_ids
    }
def on_post(self, req, resp):
    """Refresh a single account's free balance from the chain.

    Expects ``account_id`` in the JSON request body. Responds 400 when it is
    missing, 404 when no such account exists, otherwise updates the stored
    balance and returns a success payload.
    """
    msg = "TODO"

    account_id = req.media.get('account_id')
    if not account_id:
        # Fix: previously fell through without setting any response when
        # the request body lacked an account_id.
        resp.status = falcon.HTTP_400
        resp.media = {'result': 'account_id is required'}
        return

    account = Account.query(self.session).filter(Account.id == account_id).first()
    if account:
        substrate = SubstrateInterface(SUBSTRATE_RPC_URL)
        # FreeBalance storage: treat an absent key as a zero balance.
        balance = substrate.get_storage(
            block_hash=None,
            module='Balances',
            function='FreeBalance',
            params=account.id,
            return_scale_type='Balance',
            hasher='Blake2_256') or 0
        account.balance = balance
        self.session.commit()
        resp.media = {'status': 'success', 'data': {'message': msg}}
    else:
        resp.status = falcon.HTTP_404
        resp.media = {'result': 'Account not found'}
def sequencing_hook(self, db_session, parent_block_data, parent_sequenced_block_data):
    """Apply this block's account audit records (new/reaped) to Account rows.

    Creates the Account when it does not exist yet, deriving the address via
    SS58 encoding of the raw account id.
    """
    for account_audit in AccountAudit.query(db_session).filter_by(block_id=self.block.id).order_by('event_idx'):
        try:
            account = Account.query(db_session).filter_by(id=account_audit.account_id).one()
            if account_audit.type_id == ACCOUNT_AUDIT_TYPE_REAPED:
                account.count_reaped += 1
                account.is_reaped = True
            elif account_audit.type_id == ACCOUNT_AUDIT_TYPE_NEW:
                account.is_reaped = False
            account.updated_at_block = self.block.id
        except NoResultFound:
            account = Account(
                id=account_audit.account_id,
                address=ss58_encode(account_audit.account_id, SUBSTRATE_ADDRESS_TYPE),
                created_at_block=self.block.id,
                updated_at_block=self.block.id,
                balance=0
            )
            # If reaped but does not exist, create new account for now
            if account_audit.type_id != ACCOUNT_AUDIT_TYPE_NEW:
                account.is_reaped = True
                account.count_reaped = 1
        account.save(db_session)
def sequencing_hook(self, db_session, parent_block_data, parent_sequenced_block_data):
    """Apply this block's account audit records to Account rows (bech32 network variant).

    New accounts get a bech32-encoded address and a randomly assigned shard
    number.
    """
    for account_audit in AccountAudit.query(db_session).filter_by(
            block_id=self.block.id).order_by('event_idx'):
        try:
            account = Account.query(db_session).filter_by(
                id=account_audit.account_id).one()
            if account_audit.type_id == ACCOUNT_AUDIT_TYPE_REAPED:
                account.count_reaped += 1
                account.is_reaped = True
            elif account_audit.type_id == ACCOUNT_AUDIT_TYPE_NEW:
                account.is_reaped = False
            account.updated_at_block = self.block.id
        except NoResultFound:
            # Account not seen before: create it with a bech32 address derived
            # from the raw hex account id.
            account = Account(id=account_audit.account_id,
                              address=bech32.encode(
                                  HRP,
                                  bytes().fromhex(account_audit.account_id)),
                              created_at_block=self.block.id,
                              updated_at_block=self.block.id,
                              balance=0)
            # If reaped but does not exist, create new account for now
            if account_audit.type_id != ACCOUNT_AUDIT_TYPE_NEW:
                account.is_reaped = True
                account.count_reaped = 1
            # NOTE(review): shard assignment appears to be random placeholder
            # logic — confirm intended shard routing.
            account.shard_num = random.randint(1, 10000)
        account.save(db_session)
def sequencing_hook(self, db_session, parent_block_data, parent_sequenced_block_data):
    """Process identity judgement audits for this block.

    For each GIVEN judgement: upsert the IdentityJudgement row, bump the
    account's good/bad judgement counters, and propagate the counters to any
    sub-identities.
    """
    for identity_audit in IdentityJudgementAudit.query(db_session).filter_by(
            block_id=self.block.id).order_by('event_idx'):
        if identity_audit.type_id == settings.IDENTITY_JUDGEMENT_TYPE_GIVEN:
            judgement = IdentityJudgement.query(db_session).filter_by(
                account_id=identity_audit.account_id,
                registrar_index=identity_audit.registrar_index).first()
            if not judgement:
                # First judgement by this registrar for this account.
                judgement = IdentityJudgement(
                    account_id=identity_audit.account_id,
                    registrar_index=identity_audit.registrar_index,
                    created_at_block=self.block.id)
            if identity_audit.data:
                judgement.judgement = identity_audit.data.get('judgement')
                judgement.updated_at_block = self.block.id
                judgement.save(db_session)
                account = Account.query(db_session).get(identity_audit.account_id)
                if account:
                    # Positive vs negative judgement tallies on the account.
                    if judgement.judgement in ['Reasonable', 'KnownGood']:
                        account.identity_judgement_good += 1
                    if judgement.judgement in ['LowQuality', 'Erroneous']:
                        account.identity_judgement_bad += 1
                    account.save(db_session)
                    if account.has_subidentity:
                        # Update sub identities
                        sub_accounts = Account.query(db_session).filter_by(
                            parent_identity=account.id)
                        for sub_account in sub_accounts:
                            sub_account.identity_judgement_good = account.identity_judgement_good
                            sub_account.identity_judgement_bad = account.identity_judgement_bad
                            sub_account.save(db_session)
def sequencing_hook(self, db_session, parent_block, parent_sequenced_block):
    """Copy this block's AccountInfoSnapshot balances and nonce onto their accounts."""
    snapshots = AccountInfoSnapshot.query(db_session).filter_by(block_id=self.block.id)
    for snapshot in snapshots:
        target = Account.query(db_session).get(snapshot.account_id)
        if target is None:
            # No matching account row; nothing to update.
            continue
        target.balance_total = snapshot.balance_total
        target.balance_reserved = snapshot.balance_reserved
        target.balance_free = snapshot.balance_free
        target.nonce = snapshot.nonce
        target.save(db_session)
def sequencing_hook(self, db_session, parent_block_data, parent_sequenced_block_data):
    """Process account-index audits for this block: claim new indices or reap old ones."""
    for account_index_audit in AccountIndexAudit.query(db_session).filter_by(
            block_id=self.block.id
    ).order_by('event_idx'):
        if account_index_audit.type_id == settings.ACCOUNT_INDEX_AUDIT_TYPE_NEW:
            # Check if account index already exists
            account_index = AccountIndex.query(db_session).filter_by(
                id=account_index_audit.account_index_id
            ).first()
            if not account_index:
                account_index = AccountIndex(
                    id=account_index_audit.account_index_id,
                    created_at_block=self.block.id
                )
            # (Re)bind the index to its account and derive the short address.
            account_index.account_id = account_index_audit.account_id
            account_index.short_address = ss58_encode_account_index(
                account_index_audit.account_index_id,
                settings.SUBSTRATE_ADDRESS_TYPE
            )
            account_index.updated_at_block = self.block.id
            account_index.save(db_session)
            # Update index in corresponding account
            account = Account.query(db_session).get(account_index.account_id)
            if account:
                account.index_address = account_index.short_address
                account.save(db_session)
        elif account_index_audit.type_id == settings.ACCOUNT_INDEX_AUDIT_TYPE_REAPED:
            # Reap either the one named index or every index on the account.
            if account_index_audit.account_index_id:
                account_index_list = AccountIndex.query(db_session).filter_by(
                    id=account_index_audit.account_index_id
                )
            else:
                account_index_list = AccountIndex.query(db_session).filter_by(
                    account_id=account_index_audit.account_id
                )
            for account_index in account_index_list:
                # Detach from the account and mark reclaimable.
                account_index.account_id = None
                account_index.is_reclaimable = True
                account_index.updated_at_block = self.block.id
                account_index.save(db_session)
def sequencing_hook(self, db_session, parent_block, parent_sequenced_block):
    """Refresh the registrar flags on accounts from the current on-chain registrar set.

    `was_registrar` is sticky (only ever set True); `is_registrar` tracks the
    current membership.
    """
    registrars = self.substrate.get_runtime_state(
        module="Identity", storage_function="Registrars", params=[]).get('result')
    if not registrars:
        registrars = []
    registrar_ids = [
        registrar['account'].replace('0x', '') for registrar in registrars
    ]
    # Mark everyone currently in the set as having been a registrar.
    Account.query(db_session).filter(
        Account.id.in_(registrar_ids),
        Account.was_registrar == False).update(
            {Account.was_registrar: True}, synchronize_session='fetch')
    # Clear the flag on accounts that left the set.
    Account.query(db_session).filter(
        Account.id.notin_(registrar_ids),
        Account.is_registrar == True).update(
            {Account.is_registrar: False}, synchronize_session='fetch')
    # Set the flag on accounts that joined the set.
    Account.query(db_session).filter(
        Account.id.in_(registrar_ids),
        Account.is_registrar == False).update(
            {Account.is_registrar: True}, synchronize_session='fetch')
def accumulation_hook(self, db_session):
    """Record a NewAccount event: create the Account (if absent) and an AccountAudit row."""
    # Check event requirements
    if len(self.event.attributes) == 2 and \
            self.event.attributes[0]['type'] == 'AccountId' and self.event.attributes[1]['type'] == 'Balance':

        account_id = self.event.attributes[0]['value'].replace('0x', '')
        # NOTE(review): `balance` is extracted but never used below — confirm
        # whether the event balance was meant to initialize the account.
        balance = self.event.attributes[1]['value']

        self.block._accounts_new.append(account_id)

        account_audit = AccountAudit(
            account_id=account_id,
            block_id=self.event.block_id,
            extrinsic_idx=self.event.extrinsic_idx,
            event_idx=self.event.event_idx,
            type_id=ACCOUNT_AUDIT_TYPE_NEW)

        account_total = Account.query(db_session).filter_by(
            id=account_audit.account_id).count()
        #print('NewAccountEventProcessor get account_total {} =='.format(account_total))
        if account_total <= 0:
            account = Account(id=account_audit.account_id,
                              address=bech32.encode(
                                  HRP,
                                  bytes().fromhex(account_audit.account_id)),
                              created_at_block=account_audit.block_id,
                              updated_at_block=account_audit.block_id,
                              balance=0)
            # NOTE(review): account_audit.type_id is always ACCOUNT_AUDIT_TYPE_NEW
            # here (set above), so the REAPED branches below are dead code kept
            # for parity with the sequencing hooks.
            if account_audit.type_id == ACCOUNT_AUDIT_TYPE_REAPED:
                account.count_reaped += 1
                account.is_reaped = True
            elif account_audit.type_id == ACCOUNT_AUDIT_TYPE_NEW:
                account.is_reaped = False
            account.updated_at_block = account_audit.block_id
            # If reaped but does not exist, create new account for now
            if account_audit.type_id != ACCOUNT_AUDIT_TYPE_NEW:
                account.is_reaped = True
                account.count_reaped = 1
            account.shard_num = 0
            #print('NewAccountEventProcessor start add account {} =='.format(account))
            account.save(db_session)
            # db_session.commit()
        account_audit.save(db_session)
def get_item(self, item_id):
    """Look up an Account by its bech32-encoded address.

    Decodes the address back to the raw hex account id used as the primary
    key, then fetches the matching row (or None).
    """
    raw_account_id = bytes(bech32.decode(HRP, item_id)[1]).hex()
    return Account.query(self.session).filter_by(id=raw_account_id).first()
def sequencing_hook(self, db_session, parent_block_data, parent_sequenced_block_data):
    """Apply this block's account audits; new accounts also get a blake2b hash and index address."""
    for account_audit in AccountAudit.query(db_session).filter_by(
            block_id=self.block.id).order_by('event_idx'):
        try:
            account = Account.query(db_session).filter_by(
                id=account_audit.account_id).one()
            if account_audit.type_id == ACCOUNT_AUDIT_TYPE_REAPED:
                account.count_reaped += 1
                account.is_reaped = True
            elif account_audit.type_id == ACCOUNT_AUDIT_TYPE_NEW:
                account.is_reaped = False
            account.updated_at_block = self.block.id
        except NoResultFound:
            account = Account(id=account_audit.account_id,
                              address=ss58_encode(account_audit.account_id,
                                                  SUBSTRATE_ADDRESS_TYPE),
                              hash_blake2b=blake2_256(
                                  binascii.unhexlify(account_audit.account_id)),
                              created_at_block=self.block.id,
                              updated_at_block=self.block.id,
                              balance=0)
            # Retrieve index in corresponding account
            account_index = AccountIndex.query(db_session).filter_by(
                account_id=account.id).first()
            if account_index:
                account.index_address = account_index.short_address
                account.save(db_session)
            # # If reaped but does not exist, create new account for now
            # if account_audit.type_id != ACCOUNT_AUDIT_TYPE_NEW:
            #     account.is_reaped = True
            #     account.count_reaped = 1
        account.save(db_session)
def sequencing_hook(self, db_session, parent_block_data, parent_sequenced_block_data):
    """Apply this block's account audits; new accounts get their free balance fetched from the node."""
    for account_audit in AccountAudit.query(db_session).filter_by(block_id=self.block.id).order_by('event_idx'):
        try:
            account = Account.query(db_session).filter_by(id=account_audit.account_id).one()
            if account_audit.type_id == ACCOUNT_AUDIT_TYPE_REAPED:
                account.count_reaped += 1
                account.is_reaped = True
            elif account_audit.type_id == ACCOUNT_AUDIT_TYPE_NEW:
                account.is_reaped = False
            account.updated_at_block = self.block.id
        except NoResultFound:
            account = Account(
                id=account_audit.account_id,
                address=ss58_encode(account_audit.account_id, SUBSTRATE_ADDRESS_TYPE),
                created_at_block=self.block.id,
                updated_at_block=self.block.id,
                balance=0
            )
            ### account balance
            # Fetch the current free balance from the node; absent key == 0.
            substrate = SubstrateInterface(SUBSTRATE_RPC_URL)
            balance = substrate.get_storage(
                block_hash=None,
                module='Balances',
                function='FreeBalance',
                params=account_audit.account_id,
                return_scale_type='Balance',
                hasher='Blake2_256') or 0
            account.balance = balance
            # If reaped but does not exist, create new account for now
            if account_audit.type_id != ACCOUNT_AUDIT_TYPE_NEW:
                account.is_reaped = True
                account.count_reaped = 1
        account.save(db_session)
def sequencing_hook(self, db_session, parent_block, parent_sequenced_block):
    """On a successful setKey extrinsic, repoint the sudo flags to the new key.

    `was_sudo` is sticky (never cleared); `is_sudo` tracks the current key only.
    """
    if not self.extrinsic.success:
        return

    sudo_key = self.extrinsic.params[0]['value'].replace('0x', '')

    # Record that this account held the sudo key at some point.
    Account.query(db_session).filter(
        Account.id == sudo_key, Account.was_sudo == False
    ).update({Account.was_sudo: True}, synchronize_session='fetch')

    # Drop the current-sudo flag from every other account.
    Account.query(db_session).filter(
        Account.id != sudo_key, Account.is_sudo == True
    ).update({Account.is_sudo: False}, synchronize_session='fetch')

    # Set the current-sudo flag on the new key holder.
    Account.query(db_session).filter(
        Account.id == sudo_key, Account.is_sudo == False
    ).update({Account.is_sudo: True}, synchronize_session='fetch')
def sequencing_hook(self, db_session, parent_block, parent_sequenced_block):
    """Sync council-membership flags from a NewMembers event.

    `was_council_member` is sticky; `is_council_member` mirrors the new set.
    """
    new_member_ids = [
        member_struct['account'].replace('0x', '')
        for member_struct in self.event.attributes[0]['value']
    ]

    flag_changes = [
        # (id filter, flag column, current value filter, new value)
        (Account.id.in_(new_member_ids), Account.was_council_member == False,
         {Account.was_council_member: True}),
        (Account.id.notin_(new_member_ids), Account.is_council_member == True,
         {Account.is_council_member: False}),
        (Account.id.in_(new_member_ids), Account.is_council_member == False,
         {Account.is_council_member: True}),
    ]
    for id_filter, flag_filter, new_values in flag_changes:
        Account.query(db_session).filter(id_filter, flag_filter).update(
            new_values, synchronize_session='fetch')
def sequencing_hook(self, db_session, parent_block_data, parent_sequenced_block_data):
    """Apply this block's account audits, then backfill any active accounts missed by them.

    New accounts are created with SS58 address, blake2b hash, treasury/sudo
    flags from the audit payload, and their initial balances/nonce read from
    on-chain System.Account storage at this block.
    """
    for account_audit in AccountAudit.query(db_session).filter_by(block_id=self.block.id).order_by('event_idx'):
        try:
            account = Account.query(db_session).filter_by(id=account_audit.account_id).one()
            if account_audit.type_id == settings.ACCOUNT_AUDIT_TYPE_REAPED:
                account.count_reaped += 1
                account.is_reaped = True
            elif account_audit.type_id == settings.ACCOUNT_AUDIT_TYPE_NEW:
                account.is_reaped = False
            account.updated_at_block = self.block.id
        except NoResultFound:
            account = Account(
                id=account_audit.account_id,
                address=ss58_encode(account_audit.account_id, settings.SUBSTRATE_ADDRESS_TYPE),
                hash_blake2b=blake2_256(binascii.unhexlify(account_audit.account_id)),
                # Audit payload may be None; default the flags to False.
                is_treasury=(account_audit.data or {}).get('is_treasury', False),
                is_sudo=(account_audit.data or {}).get('is_sudo', False),
                was_sudo=(account_audit.data or {}).get('is_sudo', False),
                created_at_block=self.block.id,
                updated_at_block=self.block.id
            )
            # Retrieve index in corresponding account
            account_index = AccountIndex.query(db_session).filter_by(account_id=account.id).first()
            if account_index:
                account.index_address = account_index.short_address
            # Retrieve and set initial balance
            try:
                account_info_data = self.substrate.get_runtime_state(
                    module='System',
                    storage_function='Account',
                    params=['0x{}'.format(account.id)],
                    block_hash=self.block.hash
                ).get('result')
                if account_info_data:
                    account.balance_free = account_info_data["data"]["free"]
                    account.balance_reserved = account_info_data["data"]["reserved"]
                    account.balance_total = account_info_data["data"]["free"] + account_info_data["data"]["reserved"]
                    account.nonce = account_info_data["nonce"]
            except ValueError:
                # Storage decode failure: leave balances unset rather than abort.
                pass
            # # If reaped but does not exist, create new account for now
            # if account_audit.type_id != ACCOUNT_AUDIT_TYPE_NEW:
            #     account.is_reaped = True
            #     account.count_reaped = 1
        account.save(db_session)

    # Until SUDO and batch calls are processed separately we need to do a safety check to be sure we include all
    # accounts that have activity (lookup in account_index) in current block
    # TODO implement calls
    for search_index in db_session.query(SearchIndex.account_id).filter(
            SearchIndex.block_id == self.block.id,
            SearchIndex.account_id.notin_(db_session.query(Account.id))
    ).distinct():
        account = Account(
            id=search_index.account_id,
            address=ss58_encode(search_index.account_id, settings.SUBSTRATE_ADDRESS_TYPE),
            hash_blake2b=blake2_256(binascii.unhexlify(search_index.account_id)),
            created_at_block=self.block.id,
            updated_at_block=self.block.id
        )
        try:
            account_info_data = self.substrate.get_runtime_state(
                module='System',
                storage_function='Account',
                params=['0x{}'.format(account.id)],
                block_hash=self.block.hash
            ).get('result')
            if account_info_data:
                account.balance_free = account_info_data["data"]["free"]
                account.balance_reserved = account_info_data["data"]["reserved"]
                account.balance_total = account_info_data["data"]["free"] + account_info_data["data"]["reserved"]
                account.nonce = account_info_data["nonce"]
        except ValueError:
            pass
        account.save(db_session)
def on_get(self, req, resp, network_id=None):
    """Return cached network statistics (best block, event/extrinsic/account totals, shard count).

    Responses are cached per method+URL; X-Cache header reports HIT/MISS.
    """
    resp.status = falcon.HTTP_200

    # TODO make caching more generic for custom resources
    cache_key = '{}-{}'.format(req.method, req.url)
    # Fix: previously attached a new StreamHandler to logger 'yee' on every
    # request (handler leak, duplicated log output) and printed debug state;
    # both removed.

    response = self.cache_region.get(cache_key, self.cache_expiration_time)

    if response is NO_VALUE:
        best_block = Block.query(self.session).filter_by(
            id=self.session.query(func.max(Block.id)).one()[0]).first()
        total_signed_extrinsics = Extrinsic.query(
            self.session).filter_by(signed=1).count()
        # Fix: dropped the no-op filter_by() call.
        total_accounts = Account.query(self.session).count()
        event = Event.query(self.session).filter_by(
            id=self.session.query(func.max(Event.id)).one()[0]).first()
        eventid = 0 if event is None else event.id

        if best_block:
            substrate = SubstrateInterface(
                SUBSTRATE_RPC_URL, metadata_version=SUBSTRATE_METADATA_VERSION)
            response = self.get_jsonapi_response(data={
                'type': 'networkstats',
                'id': network_id,
                'attributes': {
                    'best_block': best_block.id,
                    'total_signed_extrinsics': total_signed_extrinsics,
                    'total_events': eventid,
                    'total_events_module': int(best_block.id),
                    'total_blocks': 'N/A',
                    'total_accounts': total_accounts,
                    'total_runtimes': Runtime.query(self.session).count(),
                    # Shard count is returned hex-encoded by the node.
                    'shard_count': int(substrate.get_ShardCount(), 16)
                }
            }, )
        else:
            # No blocks harvested yet: report an all-zero stats payload.
            response = self.get_jsonapi_response(data={
                'type': 'networkstats',
                'id': network_id,
                'attributes': {
                    'best_block': 0,
                    'total_signed_extrinsics': 0,
                    'total_events': 0,
                    'total_events_module': 0,
                    'total_blocks': 'N/A',
                    'total_accounts': 0,
                    'total_runtimes': 0
                }
            }, )
        self.cache_region.set(cache_key, response)
        resp.set_header('X-Cache', 'MISS')
    else:
        resp.set_header('X-Cache', 'HIT')

    resp.media = response
def get_item(self, item_id):
    """Return the first Account whose address or index address equals item_id (or None)."""
    matches_either = or_(
        Account.address == item_id,
        Account.index_address == item_id,
    )
    return Account.query(self.session).filter(matches_either).first()
def get_query(self):
    """Return all accounts ordered by total balance, highest first."""
    accounts = Account.query(self.session)
    return accounts.order_by(Account.balance_total.desc())
def add_session(self, db_session, session_id):
    """Record a new staking session: validators, nominators, session totals and account flags.

    Reads era/validator/exposure data from on-chain storage at this block,
    persists SessionValidator/SessionNominator/Session/SessionTotal rows, and
    refreshes the (was_/is_)validator and (was_/is_)nominator flags on accounts.
    """
    nominators = []

    # Retrieve current era
    try:
        current_era = self.substrate.get_runtime_state(
            module="Staking",
            storage_function="CurrentEra",
            params=[],
            block_hash=self.block.hash
        ).get('result')
    except StorageFunctionNotFound:
        current_era = None

    # Retrieve validators for new session from storage
    try:
        validators = self.substrate.get_runtime_state(
            module="Session",
            storage_function="Validators",
            params=[],
            block_hash=self.block.hash
        ).get('result', [])
    except StorageFunctionNotFound:
        validators = []

    for rank_nr, validator_account in enumerate(validators):
        validator_ledger = {}
        validator_session = None

        validator_stash = validator_account.replace('0x', '')

        # Retrieve controller account
        try:
            validator_controller = self.substrate.get_runtime_state(
                module="Staking",
                storage_function="Bonded",
                params=[validator_account],
                block_hash=self.block.hash
            ).get('result')
            if validator_controller:
                validator_controller = validator_controller.replace('0x', '')
        except StorageFunctionNotFound:
            validator_controller = None

        # Retrieve validator preferences for stash account
        try:
            validator_prefs = self.substrate.get_runtime_state(
                module="Staking",
                storage_function="ErasValidatorPrefs",
                params=[current_era, validator_account],
                block_hash=self.block.hash
            ).get('result')
        except StorageFunctionNotFound:
            validator_prefs = None
        if not validator_prefs:
            validator_prefs = {'commission': None}

        # Retrieve bonded
        try:
            exposure = self.substrate.get_runtime_state(
                module="Staking",
                storage_function="ErasStakers",
                params=[current_era, validator_account],
                block_hash=self.block.hash
            ).get('result')
        except StorageFunctionNotFound:
            exposure = None
        if not exposure:
            exposure = {}

        # Nominator-contributed stake = total exposure minus the validator's own.
        if exposure.get('total'):
            bonded_nominators = exposure.get('total') - exposure.get('own')
        else:
            bonded_nominators = None

        session_validator = SessionValidator(
            session_id=session_id,
            validator_controller=validator_controller,
            validator_stash=validator_stash,
            bonded_total=exposure.get('total'),
            bonded_active=validator_ledger.get('active'),
            bonded_own=exposure.get('own'),
            bonded_nominators=bonded_nominators,
            validator_session=validator_session,
            rank_validator=rank_nr,
            unlocking=validator_ledger.get('unlocking'),
            count_nominators=len(exposure.get('others', [])),
            unstake_threshold=None,
            commission=validator_prefs.get('commission')
        )
        session_validator.save(db_session)

        # Store nominators
        for rank_nominator, nominator_info in enumerate(exposure.get('others', [])):
            nominator_stash = nominator_info.get('who').replace('0x', '')
            nominators.append(nominator_stash)
            session_nominator = SessionNominator(
                session_id=session_id,
                rank_validator=rank_nr,
                rank_nominator=rank_nominator,
                nominator_stash=nominator_stash,
                bonded=nominator_info.get('value'),
            )
            session_nominator.save(db_session)

    # Store session
    session = Session(
        id=session_id,
        start_at_block=self.block.id + 1,
        created_at_block=self.block.id,
        created_at_extrinsic=self.event.extrinsic_idx,
        created_at_event=self.event.event_idx,
        count_validators=len(validators),
        count_nominators=len(set(nominators)),
        era=current_era
    )
    session.save(db_session)

    # Retrieve previous session to calculate count_blocks
    prev_session = Session.query(db_session).filter_by(id=session_id - 1).first()
    if prev_session:
        count_blocks = self.block.id - prev_session.start_at_block + 1
    else:
        count_blocks = self.block.id

    # Close out the previous session's totals.
    session_total = SessionTotal(
        id=session_id - 1,
        end_at_block=self.block.id,
        count_blocks=count_blocks
    )
    session_total.save(db_session)

    # Update validator flags: was_* is sticky, is_* mirrors the current set.
    validator_ids = [v.replace('0x', '') for v in validators]
    Account.query(db_session).filter(
        Account.id.in_(validator_ids), Account.was_validator == False
    ).update({Account.was_validator: True}, synchronize_session='fetch')
    Account.query(db_session).filter(
        Account.id.notin_(validator_ids), Account.is_validator == True
    ).update({Account.is_validator: False}, synchronize_session='fetch')
    Account.query(db_session).filter(
        Account.id.in_(validator_ids), Account.is_validator == False
    ).update({Account.is_validator: True}, synchronize_session='fetch')

    # Update nominator flags
    Account.query(db_session).filter(
        Account.id.in_(nominators), Account.was_nominator == False
    ).update({Account.was_nominator: True}, synchronize_session='fetch')
    Account.query(db_session).filter(
        Account.id.notin_(nominators), Account.is_nominator == True
    ).update({Account.is_nominator: False}, synchronize_session='fetch')
    Account.query(db_session).filter(
        Account.id.in_(nominators), Account.is_nominator == False
    ).update({Account.is_nominator: True}, synchronize_session='fetch')
def add_session_old(self, db_session, session_id):
    """Persist a new staking session snapshot (legacy storage layout).

    For the block this event handler is processing, reads validator set,
    per-validator staking data and nominator exposures from on-chain
    storage, then stores Session / SessionValidator / SessionNominator /
    SessionTotal rows and refreshes the validator/nominator flags on
    Account records.

    :param db_session: SQLAlchemy session used for all reads and writes.
    :param session_id: numeric id of the session that starts at the
        next block; `session_id - 1` is used for the SessionTotal row.
    """
    current_era = None
    validators = []
    nominators = []
    # Maps stash account id (hex, no 0x) -> session key (hex, no 0x);
    # only populated on the non-legacy lookup path below.
    validation_session_lookup = {}

    substrate = SubstrateInterface(
        url=settings.SUBSTRATE_RPC_URL,
        runtime_config=RuntimeConfiguration(),
        type_registry_preset=settings.TYPE_REGISTRY
    )

    # Retrieve current era
    # Storage metadata is looked up per spec version so the correct
    # SCALE return type and hasher are used for this block.
    storage_call = RuntimeStorage.query(db_session).filter_by(
        module_id='staking',
        name='CurrentEra',
        spec_version=self.block.spec_version_id
    ).first()

    if storage_call:
        try:
            current_era = substrate.get_storage(
                block_hash=self.block.hash,
                module="Staking",
                function="CurrentEra",
                return_scale_type=storage_call.get_return_type(),
                hasher=storage_call.type_hasher,
                metadata_version=SUBSTRATE_METADATA_VERSION
            )
        # Decoding failure is treated as "value unavailable": era stays None.
        except RemainingScaleBytesNotEmptyException:
            pass

    # Retrieve validators for new session from storage
    storage_call = RuntimeStorage.query(db_session).filter_by(
        module_id='session',
        name='Validators',
        spec_version=self.block.spec_version_id
    ).first()

    if storage_call:
        try:
            validators = substrate.get_storage(
                block_hash=self.block.hash,
                module="Session",
                function="Validators",
                return_scale_type=storage_call.get_return_type(),
                hasher=storage_call.type_hasher,
                metadata_version=SUBSTRATE_METADATA_VERSION
            ) or []
        except RemainingScaleBytesNotEmptyException:
            pass

    # Retrieve all sessions in one call
    if not LEGACY_SESSION_VALIDATOR_LOOKUP:
        # Retrieve session account
        # TODO move to network specific data types
        storage_call = RuntimeStorage.query(db_session).filter_by(
            module_id='session',
            name='QueuedKeys',
            spec_version=self.block.spec_version_id
        ).first()

        if storage_call:
            # Decode QueuedKeys with the registered type first, then fall
            # back to older key layouts (LegacyKeys, then EdgewareKeys)
            # when the registered type leaves undecoded bytes.
            try:
                validator_session_list = substrate.get_storage(
                    block_hash=self.block.hash,
                    module="Session",
                    function="QueuedKeys",
                    return_scale_type=storage_call.get_return_type(),
                    hasher=storage_call.type_hasher,
                    metadata_version=SUBSTRATE_METADATA_VERSION
                ) or []
            except RemainingScaleBytesNotEmptyException:
                try:
                    validator_session_list = substrate.get_storage(
                        block_hash=self.block.hash,
                        module="Session",
                        function="QueuedKeys",
                        return_scale_type='Vec<(ValidatorId, LegacyKeys)>',
                        hasher=storage_call.type_hasher,
                        metadata_version=SUBSTRATE_METADATA_VERSION
                    ) or []
                except RemainingScaleBytesNotEmptyException:
                    validator_session_list = substrate.get_storage(
                        block_hash=self.block.hash,
                        module="Session",
                        function="QueuedKeys",
                        return_scale_type='Vec<(ValidatorId, EdgewareKeys)>',
                        hasher=storage_call.type_hasher,
                        metadata_version=SUBSTRATE_METADATA_VERSION
                    ) or []

            # build lookup dict
            validation_session_lookup = {}
            for validator_session_item in validator_session_list:
                session_key = ''

                # Prefer ed25519 key when both are present (assigned last).
                if validator_session_item['keys'].get('grandpa'):
                    session_key = validator_session_item['keys'].get('grandpa')

                if validator_session_item['keys'].get('ed25519'):
                    session_key = validator_session_item['keys'].get('ed25519')

                validation_session_lookup[
                    validator_session_item['validator'].replace('0x', '')] = session_key.replace('0x', '')

    for rank_nr, validator_account in enumerate(validators):
        validator_stash = None
        validator_controller = None
        validator_ledger = {}
        validator_prefs = {}
        validator_session = ''
        exposure = {}

        if not LEGACY_SESSION_VALIDATOR_LOOKUP:
            # Modern layout: Session.Validators yields stash accounts.
            validator_stash = validator_account.replace('0x', '')

            # Retrieve stash account
            storage_call = RuntimeStorage.query(db_session).filter_by(
                module_id='staking',
                name='Bonded',
                spec_version=self.block.spec_version_id
            ).first()

            if storage_call:
                try:
                    validator_controller = substrate.get_storage(
                        block_hash=self.block.hash,
                        module="Staking",
                        function="Bonded",
                        params=validator_stash,
                        return_scale_type=storage_call.get_return_type(),
                        hasher=storage_call.type_hasher,
                        metadata_version=SUBSTRATE_METADATA_VERSION
                    ) or ''

                    validator_controller = validator_controller.replace('0x', '')
                except RemainingScaleBytesNotEmptyException:
                    pass

            # Retrieve session account
            # NOTE(review): may be None when the stash is missing from the
            # QueuedKeys lookup built above.
            validator_session = validation_session_lookup.get(validator_stash)
        else:
            # Legacy layout: Session.Validators yields controller accounts;
            # the stash is resolved from the staking ledger instead.
            validator_controller = validator_account.replace('0x', '')

            # Retrieve stash account
            storage_call = RuntimeStorage.query(db_session).filter_by(
                module_id='staking',
                name='Ledger',
                spec_version=self.block.spec_version_id
            ).first()

            if storage_call:
                try:
                    validator_ledger = substrate.get_storage(
                        block_hash=self.block.hash,
                        module="Staking",
                        function="Ledger",
                        params=validator_controller,
                        return_scale_type=storage_call.get_return_type(),
                        hasher=storage_call.type_hasher,
                        metadata_version=SUBSTRATE_METADATA_VERSION
                    ) or {}

                    validator_stash = validator_ledger.get('stash', '').replace('0x', '')
                except RemainingScaleBytesNotEmptyException:
                    pass

            # Retrieve session account
            storage_call = RuntimeStorage.query(db_session).filter_by(
                module_id='session',
                name='NextKeyFor',
                spec_version=self.block.spec_version_id
            ).first()

            if storage_call:
                try:
                    validator_session = substrate.get_storage(
                        block_hash=self.block.hash,
                        module="Session",
                        function="NextKeyFor",
                        params=validator_controller,
                        return_scale_type=storage_call.get_return_type(),
                        hasher=storage_call.type_hasher,
                        metadata_version=SUBSTRATE_METADATA_VERSION
                    ) or ''
                except RemainingScaleBytesNotEmptyException:
                    pass

            validator_session = validator_session.replace('0x', '')

        # Retrieve validator preferences for stash account
        storage_call = RuntimeStorage.query(db_session).filter_by(
            module_id='staking',
            name='Validators',
            spec_version=self.block.spec_version_id
        ).first()

        if storage_call:
            try:
                validator_prefs = substrate.get_storage(
                    block_hash=self.block.hash,
                    module="Staking",
                    function="Validators",
                    params=validator_stash,
                    return_scale_type=storage_call.get_return_type(),
                    hasher=storage_call.type_hasher,
                    metadata_version=SUBSTRATE_METADATA_VERSION
                ) or {'col1': {}, 'col2': {}}
            except RemainingScaleBytesNotEmptyException:
                pass

        # Retrieve nominators
        storage_call = RuntimeStorage.query(db_session).filter_by(
            module_id='staking',
            name='Stakers',
            spec_version=self.block.spec_version_id
        ).first()

        if storage_call:
            try:
                exposure = substrate.get_storage(
                    block_hash=self.block.hash,
                    module="Staking",
                    function="Stakers",
                    params=validator_stash,
                    return_scale_type=storage_call.get_return_type(),
                    hasher=storage_call.type_hasher,
                    metadata_version=SUBSTRATE_METADATA_VERSION
                ) or {}
            except RemainingScaleBytesNotEmptyException:
                pass

        # Nominated stake = total exposure minus the validator's own stake.
        if exposure.get('total'):
            bonded_nominators = exposure.get('total') - exposure.get('own')
        else:
            bonded_nominators = None

        session_validator = SessionValidator(
            session_id=session_id,
            validator_controller=validator_controller,
            validator_stash=validator_stash,
            bonded_total=exposure.get('total'),
            bonded_active=validator_ledger.get('active'),
            bonded_own=exposure.get('own'),
            bonded_nominators=bonded_nominators,
            validator_session=validator_session,
            rank_validator=rank_nr,
            unlocking=validator_ledger.get('unlocking'),
            count_nominators=len(exposure.get('others', [])),
            # Legacy ValidatorPrefs tuple fields live under 'col1'.
            unstake_threshold=validator_prefs.get('col1', {}).get('unstakeThreshold'),
            commission=validator_prefs.get('col1', {}).get('validatorPayment')
        )

        session_validator.save(db_session)

        # Store nominators
        for rank_nominator, nominator_info in enumerate(exposure.get('others', [])):
            nominator_stash = nominator_info.get('who').replace('0x', '')
            nominators.append(nominator_stash)

            session_nominator = SessionNominator(
                session_id=session_id,
                rank_validator=rank_nr,
                rank_nominator=rank_nominator,
                nominator_stash=nominator_stash,
                bonded=nominator_info.get('value'),
            )
            session_nominator.save(db_session)

    # Store session
    session = Session(
        id=session_id,
        # The new session takes effect from the block after this event.
        start_at_block=self.block.id + 1,
        created_at_block=self.block.id,
        created_at_extrinsic=self.event.extrinsic_idx,
        created_at_event=self.event.event_idx,
        count_validators=len(validators),
        # A stash may nominate several validators; count it once.
        count_nominators=len(set(nominators)),
        era=current_era
    )
    session.save(db_session)

    # Retrieve previous session to calculate count_blocks
    prev_session = Session.query(db_session).filter_by(id=session_id - 1).first()

    if prev_session:
        count_blocks = self.block.id - prev_session.start_at_block + 1
    else:
        count_blocks = self.block.id

    # Close out the previous session with its block count.
    session_total = SessionTotal(
        id=session_id - 1,
        end_at_block=self.block.id,
        count_blocks=count_blocks
    )
    session_total.save(db_session)

    # Update validator flags
    validator_ids = [v.replace('0x', '') for v in validators]

    Account.query(db_session).filter(
        Account.id.in_(validator_ids), Account.was_validator == False
    ).update({Account.was_validator: True}, synchronize_session='fetch')

    Account.query(db_session).filter(
        Account.id.notin_(validator_ids), Account.is_validator == True
    ).update({Account.is_validator: False}, synchronize_session='fetch')

    Account.query(db_session).filter(
        Account.id.in_(validator_ids), Account.is_validator == False
    ).update({Account.is_validator: True}, synchronize_session='fetch')

    # Update nominator flags
    Account.query(db_session).filter(
        Account.id.in_(nominators), Account.was_nominator == False
    ).update({Account.was_nominator: True}, synchronize_session='fetch')

    Account.query(db_session).filter(
        Account.id.notin_(nominators), Account.is_nominator == True
    ).update({Account.is_nominator: False}, synchronize_session='fetch')

    Account.query(db_session).filter(
        Account.id.in_(nominators), Account.is_nominator == False
    ).update({Account.is_nominator: True}, synchronize_session='fetch')
def get_query(self):
    """Return an Account query ordered by most recently updated block first."""
    accounts = Account.query(self.session)
    return accounts.order_by(Account.updated_at_block.desc())
def get_item(self, item_id):
    """Fetch a single account by address; returns None when no match exists."""
    matches = Account.query(self.session).filter_by(address=item_id)
    return matches.first()
def sequencing_hook(self, db_session, parent_block_data, parent_sequenced_block_data):
    """Replay this block's identity audit records onto Account rows.

    Processes IdentityAudit entries for the current block in event order
    and mutates the matching Account (and any sub-identity accounts) to
    reflect identity set / cleared-or-killed / set-subs events.

    :param db_session: SQLAlchemy session used for all reads and writes.
    :param parent_block_data: unused here; part of the hook signature.
    :param parent_sequenced_block_data: unused here; part of the hook signature.
    """
    # Event order matters: later audits in the same block overwrite earlier ones.
    for identity_audit in IdentityAudit.query(db_session).filter_by(block_id=self.block.id).order_by('event_idx'):
        account = Account.query(db_session).get(identity_audit.account_id)

        if account:
            if identity_audit.type_id == settings.IDENTITY_TYPE_SET:
                # Identity was (re)registered: copy the audited fields.
                account.has_identity = True
                account.identity_display = identity_audit.data.get('display')
                account.identity_email = identity_audit.data.get('email')
                account.identity_legal = identity_audit.data.get('legal')
                account.identity_riot = identity_audit.data.get('riot')
                account.identity_web = identity_audit.data.get('web')
                account.identity_twitter = identity_audit.data.get('twitter')

                if account.has_subidentity:
                    # Update sub accounts
                    # Sub accounts mirror the parent's identity fields.
                    sub_accounts = Account.query(db_session).filter_by(parent_identity=account.id)
                    for sub_account in sub_accounts:
                        sub_account.identity_display = account.identity_display
                        sub_account.identity_email = account.identity_email
                        sub_account.identity_legal = account.identity_legal
                        sub_account.identity_riot = account.identity_riot
                        sub_account.identity_web = account.identity_web
                        sub_account.identity_twitter = account.identity_twitter
                        sub_account.save(db_session)

                account.save(db_session)

            elif identity_audit.type_id in [settings.IDENTITY_TYPE_CLEARED, settings.IDENTITY_TYPE_KILLED]:
                # Identity removed: wipe fields and judgements on the
                # account and detach/clear all of its sub accounts.
                if account.has_subidentity:
                    # Clear sub accounts
                    sub_accounts = Account.query(db_session).filter_by(parent_identity=account.id)
                    for sub_account in sub_accounts:
                        sub_account.identity_display = None
                        sub_account.identity_email = None
                        sub_account.identity_legal = None
                        sub_account.identity_riot = None
                        sub_account.identity_web = None
                        sub_account.identity_twitter = None
                        sub_account.parent_identity = None
                        sub_account.has_identity = False
                        sub_account.identity_judgement_good = 0
                        sub_account.identity_judgement_bad = 0
                        sub_account.save(db_session)

                account.has_identity = False
                account.has_subidentity = False
                account.identity_display = None
                account.identity_email = None
                account.identity_legal = None
                account.identity_riot = None
                account.identity_web = None
                account.identity_twitter = None
                account.identity_judgement_good = 0
                account.identity_judgement_bad = 0
                account.save(db_session)

            elif identity_audit.type_id == settings.IDENTITY_TYPE_SET_SUBS:
                # Clear current subs
                # Existing subs are always detached first; the new set (if
                # any) is re-attached below.
                sub_accounts = Account.query(db_session).filter_by(parent_identity=account.id)
                for sub_account in sub_accounts:
                    sub_account.identity_display = None
                    sub_account.identity_email = None
                    sub_account.identity_legal = None
                    sub_account.identity_riot = None
                    sub_account.identity_web = None
                    sub_account.identity_twitter = None
                    sub_account.parent_identity = None
                    sub_account.identity_judgement_good = 0
                    sub_account.identity_judgement_bad = 0
                    sub_account.has_identity = False
                    sub_account.save(db_session)

                account.has_subidentity = False

                # Process sub identities
                if len(identity_audit.data.get('subs', [])) > 0:
                    account.has_subidentity = True

                    for sub_identity in identity_audit.data.get('subs'):
                        # Audit stores sub account ids as 0x-prefixed hex;
                        # Account.id is stored without the prefix.
                        sub_account = Account.query(db_session).get(sub_identity['account'].replace('0x', ''))
                        if sub_account:
                            sub_account.parent_identity = account.id
                            sub_account.subidentity_display = sub_identity['name']
                            # Inherit the parent's identity fields and judgements.
                            sub_account.identity_display = account.identity_display
                            sub_account.identity_email = account.identity_email
                            sub_account.identity_legal = account.identity_legal
                            sub_account.identity_riot = account.identity_riot
                            sub_account.identity_web = account.identity_web
                            sub_account.identity_twitter = account.identity_twitter
                            sub_account.identity_judgement_good = account.identity_judgement_good
                            sub_account.identity_judgement_bad = account.identity_judgement_bad
                            sub_account.has_identity = True
                            sub_account.save(db_session)

                account.save(db_session)