def on_get(self, req, resp):
    """Report harvester status plus current chain head and finalized block numbers."""
    task_status = Status.get_status(self.session, 'SEQUENCER_TASK_ID')
    head_status = Status.get_status(self.session, 'INTEGRITY_HEAD')
    max_sequenced = self.session.query(func.max(BlockTotal.id)).one()[0]

    # Look up the highest block currently stored in the database.
    latest_id = self.session.query(func.max(Block.id)).one()[0]
    latest_block = Block.query(self.session).filter_by(id=latest_id).first()

    best_block_nr = latest_block.id if latest_block else None
    best_block_datetime = (
        latest_block.datetime.replace(tzinfo=pytz.UTC).timestamp() * 1000
        if latest_block else None
    )

    node = SubstrateInterface(SUBSTRATE_RPC_URL)
    resp.media = {
        'best_block_datetime': best_block_datetime,
        'best_block_nr': best_block_nr,
        'sequencer_task': task_status.value,
        'sequencer_head': max_sequenced,
        'integrity_head': int(head_status.value),
        'chain_head_block_id': node.get_block_number(node.get_chain_head()),
        'chain_finalized_block_id': node.get_block_number(node.get_chain_finalised_head())
    }
def on_get(self, req, resp, network_id=None):
    """Serve the latest head (and finalized head number) of all four shards.

    The full JSON:API response is cached under a method+URL key; cache
    hits/misses are exposed through the ``X-Cache`` response header.
    """
    resp.status = falcon.HTTP_200

    # TODO make caching more generic for custom resources
    cache_key = '{}-{}'.format(req.method, req.url)

    logger = logging.getLogger('yee')
    # Bug fix: previously a new StreamHandler was attached on EVERY request,
    # leaking handlers and duplicating each log record. Configure once only.
    if not logger.handlers:
        console_handler = logging.StreamHandler()
        console_handler.setLevel('INFO')
        logger.setLevel('INFO')
        logger.addHandler(console_handler)
    # logger.info(cache_key)

    response = self.cache_region.get(cache_key, self.cache_expiration_time)

    if response is NO_VALUE:
        # shard01..shard04 in the payload map onto SHARDS_TABLE['shard.0']..['shard.3'];
        # the four previously copy-pasted blocks are collapsed into one loop.
        attributes = {}
        for index in range(4):
            substrate = SubstrateInterface(SHARDS_TABLE['shard.{}'.format(index)])
            header = substrate.get_block_header(None)
            header['finalizedNum'] = substrate.get_block_number(
                substrate.get_chain_finalised_head())
            attributes['shard{:02d}'.format(index + 1)] = header

        response = self.get_jsonapi_response(
            data={
                'type': 'FinalizedHeadList',
                'attributes': attributes
            },
        )
        self.cache_region.set(cache_key, response)
        resp.set_header('X-Cache', 'MISS')
    else:
        resp.set_header('X-Cache', 'HIT')

    resp.media = response
def check_healthy():
    """Print the current block number reported by every configured shard node."""
    for shard_name, endpoint in SHARDS_TABLE.items():
        node = SubstrateInterface(endpoint)
        current_block = node.get_block_number(None)
        print('== shard--{} ===substrate_url###{}==block=={} '.format(
            shard_name, endpoint, current_block))
def _get_last_block(self, node_interface: SubstrateInterface) -> BlockNumber:
    """Return the chain height.

    May raise:
    - RemoteError if there is an error
    """
    log.debug(f'{self.chain} querying last block', url=node_interface.url)
    try:
        head_hash = node_interface.get_chain_head()
        last_block = node_interface.get_block_number(block_hash=head_hash)
        if last_block is None:
            # For some reason a node can rarely return None as last block
            raise SubstrateRequestException(
                f'{self.chain} node failed to request last block. Returned None',
            )
    except (
            requests.exceptions.RequestException,
            SubstrateRequestException,
            WebSocketException,
            ValueError,
    ) as e:
        message = (
            f'{self.chain} failed to request last block '
            f'at endpoint: {node_interface.url} due to: {str(e)}.'
        )
        log.error(message)
        raise RemoteError(message) from e
    else:
        log.debug(f'{self.chain} last block', last_block=last_block)
        return BlockNumber(last_block)
def _get_last_block(self, node_interface: SubstrateInterface) -> BlockNumber:
    """Return the chain height (block number of the current chain head).

    Wraps any node/RPC failure in a RemoteError so callers only have to
    handle one exception type.
    """
    log.debug(f'{self.chain} querying last block', url=node_interface.url)
    try:
        last_block = node_interface.get_block_number(
            block_hash=node_interface.get_chain_head(),
        )
    except (
        requests.exceptions.RequestException,
        SubstrateRequestException,
        # TODO: remove TypeError once py-susbtrate-interface `get_block_number`
        # handles a None response. Keep ValueError just in case `get_chain_head`
        # returns None.
        # https://github.com/polkascan/py-substrate-interface/issues/68
        TypeError,
        ValueError,
    ) as e:
        message = (
            f'{self.chain} failed to request last block '
            f'at endpoint: {node_interface.url} due to: {str(e)}.'
        )
        log.error(message)
        # Chain the original cause so the RPC failure stays diagnosable.
        raise RemoteError(message) from e
    log.debug(f'{self.chain} last block', last_block=last_block)
    return BlockNumber(last_block)
def balance_snapshot(self, account_id=None, block_start=1, block_end=None, block_ids=None):
    """Create balance snapshots for one or all accounts over a block range.

    Args:
        account_id: snapshot only this account; when falsy, all known accounts.
        block_start: first block of the range (inclusive), default 1.
        block_end: last block of the range (inclusive); when None the current
            finalized chain tip is used.
        block_ids: explicit list of block ids; overrides block_start/block_end.

    Returns:
        A summary dict echoing the parameters used.
    """
    if account_id:
        accounts = [account_id]
    else:
        accounts = [account.id for account in Account.query(self.session)]

    harvester = PolkascanHarvesterService(
        db_session=self.session,
        type_registry=TYPE_REGISTRY,
        type_registry_file=TYPE_REGISTRY_FILE
    )

    if block_ids:
        block_range = block_ids
    else:
        if block_end is None:
            # Set block end to chaintip
            substrate = SubstrateInterface(url=SUBSTRATE_RPC_URL, runtime_config=RuntimeConfiguration())
            block_end = substrate.get_block_number(substrate.get_chain_finalised_head())
        block_range = range(block_start, block_end + 1)

    for block_id in block_range:
        for account in accounts:
            harvester.create_balance_snapshot(block_id, account)
            # Commit per snapshot so progress is persisted incrementally.
            self.session.commit()

    return {
        # Bug fix: message previously read 'Snapshop created' (typo).
        'message': 'Snapshot created',
        'account_id': account_id,
        'block_start': block_start,
        'block_end': block_end,
        'block_ids': block_ids
    }
def on_get(self, req, resp):
    """Return network statistics: head/finalized numbers, extrinsic and validator counts."""
    substrate = SubstrateInterface(
        url=SUBSTRATE_RPC_URL,
        address_type=SUBSTRATE_ADDRESS_TYPE,
        type_registry_preset=TYPE_REGISTRY
    )

    resp.status = falcon.HTTP_200

    # head = Block.get_head(self.session);
    best_hash = substrate.get_chain_head()
    best_number = substrate.get_block_number(best_hash)
    final_hash = substrate.get_chain_finalised_head()
    final_number = substrate.get_block_number(final_hash)

    extrinsic_count = Extrinsic.query(self.session).count()

    # Runtimes without a Session/Staking pallet simply report empty values.
    try:
        validators = substrate.get_runtime_state(
            module="Session",
            storage_function="Validators",
            params=[],
            block_hash=best_hash
        ).get('result', [])
    except StorageFunctionNotFound:
        validators = []

    try:
        validator_count = substrate.get_runtime_state(
            module="Staking",
            storage_function="ValidatorCount",
            params=[],
            block_hash=best_hash
        ).get('result', 0)
    except StorageFunctionNotFound:
        validator_count = 0

    transfers_count = Extrinsic.query(self.session).filter(
        and_(Extrinsic.module_id == 'balances', Extrinsic.call_id == 'transfer')
    ).count()

    resp.media = {
        'status': 'success',
        'data': {
            'blockNumber': best_number,
            'finalizedBlockNumber': final_number,
            'extrinsics': extrinsic_count,
            'currValidators': len(validators),
            'validators': validator_count,
            'transfersCount': transfers_count
        }
    }
def _get_last_block(self, node_interface: SubstrateInterface) -> BlockNumber:
    """Return the chain height.

    May raise:
    - RemoteError if the node can't be reached or returns no block number
    """
    log.debug(f'{self.chain} querying last block', url=node_interface.url)
    try:
        last_block = node_interface.get_block_number(
            block_hash=node_interface.get_chain_head(),
        )
        if last_block is None:
            # Bug fix: a node can rarely return None as last block; previously
            # this function returned BlockNumber(None). Treat it as a request
            # failure instead, matching the sibling implementation.
            raise SubstrateRequestException(
                f'{self.chain} node failed to request last block. Returned None',
            )
    except (
        requests.exceptions.RequestException,
        SubstrateRequestException,
        WebSocketException,
    ) as e:
        message = (f'{self.chain} failed to request last block '
                   f'at endpoint: {node_interface.url} due to: {str(e)}.')
        log.error(message)
        raise RemoteError(message) from e
    log.debug(f'{self.chain} last block', last_block=last_block)
    return BlockNumber(last_block)
class JSONRPCResource(BaseResource):
    """JSON-RPC 2.0 endpoint wrapping a SubstrateInterface connection.

    Methods listed in ``self.methods`` are handled locally (runtime
    introspection, SCALE encode/decode, keypair utilities, extrinsic
    composition/submission); any other method is proxied verbatim to the
    Substrate node's RPC.
    """

    def __init__(self, cache_region):
        self.cache_region = cache_region

        # Check for custom types in Redis
        self.substrate = None
        custom_type_registry = self.cache_region.get('CUSTOM_TYPE_REGISTRY')
        self.init_type_registry(custom_type_registry)

        # Per-request state; reset at the start of every on_post call.
        self.block_hash = None
        self.metadata_decoder = None
        self.runtime_version = None
        self.metadata_cache = {}

        # RPC methods implemented by this resource (anything else is proxied).
        self.methods = [
            'rpc_methods',
            'runtime_decodeScale',
            'runtime_encodeScale',
            'runtime_getMetadata',
            'runtime_getMetadataModules',
            'runtime_getMetadataCallFunctions',
            'runtime_getMetadataCallFunction',
            'runtime_getMetadataEvents',
            'runtime_getMetadataEvent',
            'runtime_getMetadataConstants',
            'runtime_getMetadataConstant',
            'runtime_getMetadataStorageFunctions',
            'runtime_getMetadataStorageFunction',
            'runtime_getMetadataErrors',
            'runtime_getMetadataError',
            'runtime_getState',
            'runtime_getTypeRegistry',
            'runtime_getType',
            'runtime_getCustomTypes',
            'runtime_addCustomType',
            'runtime_setCustomTypes',
            'runtime_removeCustomType',
            'runtime_resetCustomTypes',
            'runtime_getBlock',
            'runtime_createSignaturePayload',
            'runtime_createExternalSignerPayload',
            'runtime_createExtrinsic',
            'runtime_submitExtrinsic',
            'runtime_getPaymentInfo',
            'keypair_create',
            'keypair_inspect',
            'keypair_sign',
            'keypair_verify'
        ]

    def get_request_param(self, params):
        """Pop and return the next positional JSON-RPC parameter.

        Raises ValueError when the caller supplied too few parameters.
        """
        try:
            return params.pop(0)
        except IndexError:
            raise ValueError("Not enough parameters provided")

    def init_type_registry(self, custom_type_registry=None):
        """(Re)create the SubstrateInterface with the configured type registry.

        Merges the optional custom registry (from Redis) over the registry
        file configured in settings.
        """
        if settings.TYPE_REGISTRY_FILE:
            type_registry = load_type_registry_file(settings.TYPE_REGISTRY_FILE)
        else:
            type_registry = {}

        if custom_type_registry:
            type_registry.update(custom_type_registry)

        # NOTE(review): the merged `type_registry` above is not what is passed
        # to SubstrateInterface — only `custom_type_registry` is. Confirm
        # whether the merge result was meant to be used here.
        self.substrate = SubstrateInterface(
            url=settings.SUBSTRATE_RPC_URL,
            ss58_format=settings.SUBSTRATE_ADDRESS_TYPE,
            type_registry_preset=settings.TYPE_REGISTRY,
            type_registry=custom_type_registry
        )

        if settings.DEBUG:
            print('Custom types at init: ', custom_type_registry)
            self.substrate.debug = True

    def init_request(self, params=None):
        """Consume an optional trailing block-hash parameter.

        Accepts either a block hash or an integer block number (which is
        resolved to a hash via the node).
        """
        if params:
            self.block_hash = self.get_request_param(params)
            if type(self.block_hash) is int:
                self.block_hash = self.substrate.get_block_hash(self.block_hash)

    def on_post(self, req, resp):
        """Dispatch one JSON-RPC request.

        Validates the JSON-RPC envelope, then either proxies the call to the
        node or handles it locally when the method is in ``self.methods``.
        """
        # Reset per-request state.
        self.block_hash = None
        self.metadata_decoder = None
        self.runtime_version = None
        self.substrate.request_id = req.media.get('id')

        method = req.media.get('method')
        params = req.media.get('params', [])

        # Check request requirements
        if not req.media.get('jsonrpc'):
            resp.media = {
                "error": {
                    "code": -32600,
                    "message": "Unsupported JSON-RPC protocol version"
                },
                "id": req.media.get('id')
            }
        elif not method:
            resp.media = {
                "error": {
                    "code": -32601,
                    "message": "Method not found"
                },
                "id": req.media.get('id')
            }
        elif method not in self.methods:
            # Default pass through request to Substrate RPC
            resp.media = self.substrate.rpc_request(method, params)
        else:
            resp.status = falcon.HTTP_200

            try:
                # Process methods
                if method == 'runtime_getBlock':
                    self.init_request(params)
                    block = self.substrate.get_block(block_hash=self.block_hash)
                    if block:
                        # Serialize SCALE objects to their plain values.
                        block['extrinsics'] = [extrinsic.value for extrinsic in block['extrinsics']]
                        block['header']["digest"]["logs"] = [log.value for log in block['header']["digest"]["logs"]]
                    response = {
                        "jsonrpc": "2.0",
                        "result": block,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getState':
                    # Init params
                    storage_params = None
                    # Process params
                    module = self.get_request_param(params)
                    storage_function = self.get_request_param(params)
                    if params:
                        storage_params = self.get_request_param(params)
                    self.init_request(params)
                    # Get response
                    obj = self.substrate.query(
                        module=module,
                        storage_function=storage_function,
                        params=storage_params,
                        block_hash=self.block_hash
                    )
                    response = {'result': obj.value if obj else None}

                elif method == 'runtime_getMetadata':
                    # Process params
                    self.init_request(params)
                    # Get response
                    response = self.substrate.get_runtime_metadata(block_hash=self.block_hash)

                elif method in ['runtime_createSignaturePayload', 'runtime_createExternalSignerPayload']:
                    account = self.get_request_param(params)
                    call_module = self.get_request_param(params)
                    call_function = self.get_request_param(params)
                    call_params = self.get_request_param(params)
                    tip = self.get_request_param(params) or 0
                    era = self.get_request_param(params)
                    self.init_request(params)

                    try:
                        # Create call
                        call = self.substrate.compose_call(
                            call_module=call_module,
                            call_function=call_function,
                            call_params=call_params,
                            block_hash=self.block_hash
                        )

                        nonce = self.substrate.get_account_nonce(account) or 0

                        if isinstance(era, dict) and 'current' not in era and 'phase' not in era:
                            # Retrieve current block id
                            era['current'] = self.substrate.get_block_number(self.substrate.get_chain_head())

                        # External signers need the call length prefixed.
                        if method == 'runtime_createExternalSignerPayload':
                            include_call_length = True
                        else:
                            include_call_length = False

                        # Generate signature payload
                        signature_payload = self.substrate.generate_signature_payload(
                            call=call,
                            nonce=nonce,
                            tip=tip,
                            era=era,
                            include_call_length=include_call_length
                        )

                        response = {
                            "jsonrpc": "2.0",
                            "result": {
                                'signature_payload': str(signature_payload),
                                'nonce': nonce,
                                'era': era
                            },
                            "id": req.media.get('id')
                        }
                    except ValueError as e:
                        response = {
                            "jsonrpc": "2.0",
                            "error": {
                                "code": -999,
                                "message": str(e)
                            },
                            "id": req.media.get('id')
                        }

                elif method in ['runtime_submitExtrinsic', 'runtime_createExtrinsic']:
                    account = self.get_request_param(params)
                    call_module = self.get_request_param(params)
                    call_function = self.get_request_param(params)
                    call_params = self.get_request_param(params)
                    tip = self.get_request_param(params) or 0
                    era = self.get_request_param(params)
                    crypto_type = int(self.get_request_param(params) or 1)
                    signature = self.get_request_param(params)
                    self.init_request(params)

                    try:
                        # Create call
                        call = self.substrate.compose_call(
                            call_module=call_module,
                            call_function=call_function,
                            call_params=call_params,
                            block_hash=self.block_hash
                        )

                        nonce = self.substrate.get_account_nonce(account) or 0

                        # Create keypair with only public given given in request
                        keypair = Keypair(ss58_address=account, crypto_type=crypto_type)

                        if isinstance(era, dict) and 'current' in era:
                            era['current'] = int(era['current'])

                        # Create extrinsic
                        extrinsic = self.substrate.create_signed_extrinsic(
                            call=call,
                            keypair=keypair,
                            nonce=nonce,
                            signature=signature,
                            tip=tip,
                            era=era
                        )

                        if method == 'runtime_createExtrinsic':
                            result = str(extrinsic.data)
                        else:
                            # Submit extrinsic to the node
                            extrinsic_result = self.substrate.submit_extrinsic(
                                extrinsic=extrinsic
                            )
                            result = {
                                "extrinsic_hash": extrinsic_result.extrinsic_hash,
                                "block_hash": extrinsic_result.block_hash,
                                "finalized": extrinsic_result.finalized,
                            }

                        response = {
                            "jsonrpc": "2.0",
                            "result": result,
                            "id": req.media.get('id')
                        }
                    except ValueError as e:
                        response = {
                            "jsonrpc": "2.0",
                            "error": {
                                "code": -999,
                                "message": str(e)
                            },
                            "id": req.media.get('id')
                        }
                    except SubstrateRequestException as e:
                        # Pass the node's error object straight through.
                        response = {
                            "jsonrpc": "2.0",
                            "error": e.args[0],
                            "id": req.media.get('id')
                        }

                elif method == 'runtime_getPaymentInfo':
                    account = self.get_request_param(params)
                    call_module = self.get_request_param(params)
                    call_function = self.get_request_param(params)
                    call_params = self.get_request_param(params)

                    # Create call
                    call = self.substrate.compose_call(
                        call_module=call_module,
                        call_function=call_function,
                        call_params=call_params
                    )

                    # Create keypair with only public given given in request
                    keypair = Keypair(ss58_address=account)

                    response = {
                        "jsonrpc": "2.0",
                        "result": self.substrate.get_payment_info(call=call, keypair=keypair),
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataModules':
                    self.init_request(params)
                    response = {
                        "jsonrpc": "2.0",
                        "result": self.substrate.get_metadata_modules(block_hash=self.block_hash),
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataCallFunctions':
                    self.init_request(params)
                    call_list = self.substrate.get_metadata_call_functions(block_hash=self.block_hash)
                    response = {
                        "jsonrpc": "2.0",
                        "result": call_list,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataCallFunction':
                    param_call_module = self.get_request_param(params)
                    param_call_module_function = self.get_request_param(params)
                    self.init_request(params)
                    result = self.substrate.get_metadata_call_function(
                        module_name=param_call_module,
                        call_function_name=param_call_module_function,
                        block_hash=self.block_hash
                    )
                    response = {
                        "jsonrpc": "2.0",
                        "result": result.value,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataEvents':
                    self.init_request(params)
                    event_list = self.substrate.get_metadata_events(block_hash=self.block_hash)
                    response = {
                        "jsonrpc": "2.0",
                        "result": event_list,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataEvent':
                    param_call_module = self.get_request_param(params)
                    param_call_module_event = self.get_request_param(params)
                    self.init_request(params)
                    result = self.substrate.get_metadata_event(
                        module_name=param_call_module,
                        event_name=param_call_module_event,
                        block_hash=self.block_hash
                    )
                    response = {
                        "jsonrpc": "2.0",
                        "result": result,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataConstants':
                    self.init_request(params)
                    constant_list = self.substrate.get_metadata_constants(block_hash=self.block_hash)
                    response = {
                        "jsonrpc": "2.0",
                        "result": constant_list,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataConstant':
                    module_name = self.get_request_param(params)
                    constant_name = self.get_request_param(params)
                    self.init_request(params)
                    result = self.substrate.get_metadata_constant(
                        module_name=module_name,
                        constant_name=constant_name,
                        block_hash=self.block_hash
                    )
                    response = {
                        "jsonrpc": "2.0",
                        "result": result,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataStorageFunctions':
                    self.init_request(params)
                    storage_list = self.substrate.get_metadata_storage_functions(block_hash=self.block_hash)
                    response = {
                        "jsonrpc": "2.0",
                        "result": storage_list,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataStorageFunction':
                    module_name = self.get_request_param(params)
                    storage_name = self.get_request_param(params)
                    self.init_request(params)
                    result = self.substrate.get_metadata_storage_function(
                        module_name=module_name,
                        storage_name=storage_name,
                        block_hash=self.block_hash
                    )
                    response = {
                        "jsonrpc": "2.0",
                        "result": result.value,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataErrors':
                    self.init_request(params)
                    error_list = self.substrate.get_metadata_errors(block_hash=self.block_hash)
                    response = {
                        "jsonrpc": "2.0",
                        "result": error_list,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataError':
                    module_name = self.get_request_param(params)
                    error_name = self.get_request_param(params)
                    self.init_request(params)
                    result = self.substrate.get_metadata_error(
                        module_name=module_name,
                        error_name=error_name,
                        block_hash=self.block_hash
                    )
                    response = {
                        "jsonrpc": "2.0",
                        "result": result,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getTypeRegistry':
                    self.init_request(params)
                    result = self.substrate.get_type_registry(block_hash=self.block_hash)
                    if result:
                        result = list(result.values())
                    response = {
                        "jsonrpc": "2.0",
                        "result": result,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getType':
                    type_string = self.get_request_param(params)
                    self.init_request(params)
                    response = {
                        "jsonrpc": "2.0",
                        "result": self.substrate.get_type_definition(type_string, block_hash=self.block_hash),
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_addCustomType':
                    type_string = self.get_request_param(params)
                    type_definition = self.get_request_param(params)

                    # Retrieve current custom type registry
                    custom_type_registry = self.cache_region.get('CUSTOM_TYPE_REGISTRY')

                    if not custom_type_registry:
                        custom_type_registry = {
                            'types': {
                            }
                        }

                    custom_type_registry['types'][type_string] = type_definition

                    # TODO Try to decode given type definition

                    # Store updated custom type registry
                    self.cache_region.set('CUSTOM_TYPE_REGISTRY', custom_type_registry)

                    if settings.DEBUG:
                        print('Custom types updated to: ', custom_type_registry)

                    # Update runtime configuration
                    RuntimeConfiguration().update_type_registry(custom_type_registry)

                    response = {
                        "jsonrpc": "2.0",
                        "result": "Type registry updated",
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_setCustomTypes':
                    custom_types = self.get_request_param(params)

                    if type(custom_types) is not dict:
                        raise ValueError('custom types must be in format: {"type_string": "type_definition"}')

                    custom_type_registry = {
                        'types': custom_types
                    }

                    # Store updated custom type registry
                    self.cache_region.set('CUSTOM_TYPE_REGISTRY', custom_type_registry)

                    # Reset runtime configuration
                    RuntimeConfiguration().clear_type_registry()
                    self.init_type_registry(custom_type_registry)

                    if settings.DEBUG:
                        print('Custom types updated to: ', custom_type_registry)

                    response = {
                        "jsonrpc": "2.0",
                        "result": "Type registry updated",
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_resetCustomTypes':
                    custom_type_registry = None

                    # Store updated custom type registry
                    self.cache_region.set('CUSTOM_TYPE_REGISTRY', custom_type_registry)

                    # Reset runtime configuration
                    RuntimeConfiguration().clear_type_registry()
                    self.init_type_registry()

                    if settings.DEBUG:
                        print('Custom types cleared')

                    response = {
                        "jsonrpc": "2.0",
                        "result": "Custom types cleared",
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_removeCustomType':
                    type_string = self.get_request_param(params)

                    # Retrieve current custom type registry
                    custom_type_registry = self.cache_region.get('CUSTOM_TYPE_REGISTRY')

                    if custom_type_registry and type_string in custom_type_registry.get('types', {}):
                        del custom_type_registry['types'][type_string]

                        # Store updated custom type registry
                        self.cache_region.set('CUSTOM_TYPE_REGISTRY', custom_type_registry)

                        # Reset runtime configuration
                        RuntimeConfiguration().clear_type_registry()
                        self.init_type_registry(custom_type_registry)

                        result = '"{}" removed from custom type registry'.format(type_string)
                    else:
                        result = '"{}" not found in custom type registry'.format(type_string)

                    response = {
                        "jsonrpc": "2.0",
                        "result": result,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getCustomTypes':
                    custom_type_registry = self.cache_region.get('CUSTOM_TYPE_REGISTRY')

                    if custom_type_registry:
                        result = custom_type_registry.get('types')
                    else:
                        result = {}

                    response = {
                        "jsonrpc": "2.0",
                        "result": result,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_decodeScale':
                    type_string = self.get_request_param(params)
                    scale_hex_bytes = self.get_request_param(params)
                    self.init_request(params)
                    result = self.substrate.decode_scale(
                        type_string=type_string,
                        scale_bytes=scale_hex_bytes,
                        block_hash=self.block_hash
                    )
                    response = {
                        "jsonrpc": "2.0",
                        "result": result,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_encodeScale':
                    type_string = self.get_request_param(params)
                    value = self.get_request_param(params)
                    self.init_request(params)
                    result = self.substrate.encode_scale(
                        type_string=type_string,
                        value=value,
                        block_hash=self.block_hash
                    )
                    response = {
                        "jsonrpc": "2.0",
                        "result": result,
                        "id": req.media.get('id')
                    }

                elif method == 'keypair_create':
                    word_count = self.get_request_param(params) or 0
                    crypto_type = int(self.get_request_param(params) or 1)
                    mnemonic = Keypair.generate_mnemonic(word_count)

                    keypair = Keypair.create_from_mnemonic(
                        mnemonic=mnemonic,
                        ss58_format=settings.SUBSTRATE_ADDRESS_TYPE,
                        crypto_type=crypto_type
                    )

                    response = {
                        "jsonrpc": "2.0",
                        "result": {
                            'ss58_address': keypair.ss58_address,
                            'public_key': keypair.public_key,
                            'private_key': keypair.private_key,
                            'mnemonic': keypair.mnemonic,
                        },
                        "id": req.media.get('id')
                    }

                elif method == 'keypair_inspect':
                    mnemonic = self.get_request_param(params)
                    crypto_type = int(self.get_request_param(params) or 1)

                    keypair = Keypair.create_from_mnemonic(
                        mnemonic=mnemonic,
                        ss58_format=settings.SUBSTRATE_ADDRESS_TYPE,
                        crypto_type=crypto_type
                    )

                    response = {
                        "jsonrpc": "2.0",
                        "result": {
                            'ss58_address': keypair.ss58_address,
                            'public_key': keypair.public_key,
                            'private_key': keypair.private_key,
                            'mnemonic': keypair.mnemonic,
                        },
                        "id": req.media.get('id')
                    }

                elif method == 'keypair_sign':
                    mnemonic = self.get_request_param(params)
                    data = self.get_request_param(params)
                    crypto_type = int(self.get_request_param(params) or 1)

                    keypair = Keypair.create_from_mnemonic(
                        mnemonic=mnemonic,
                        ss58_format=settings.SUBSTRATE_ADDRESS_TYPE,
                        crypto_type=crypto_type
                    )
                    signature = keypair.sign(data)

                    response = {
                        "jsonrpc": "2.0",
                        "result": {'signature': signature},
                        "id": req.media.get('id')
                    }

                elif method == 'keypair_verify':
                    account_address = self.get_request_param(params)
                    data = self.get_request_param(params)
                    signature = self.get_request_param(params)
                    crypto_type = int(self.get_request_param(params) or 1)

                    keypair = Keypair(
                        ss58_address=account_address,
                        ss58_format=settings.SUBSTRATE_ADDRESS_TYPE,
                        crypto_type=crypto_type
                    )
                    result = keypair.verify(data, signature)

                    response = {
                        "jsonrpc": "2.0",
                        "result": {'verified': result},
                        "id": req.media.get('id')
                    }

                elif method == 'rpc_methods':
                    response = self.substrate.rpc_request(method, params)

                    # Add additional implemented method
                    response['result']['methods'] = sorted(response['result']['methods'] + self.methods)

                else:
                    raise NotImplementedError('Method \'{}\' not implemented yet'.format(method))

            except (ValueError, NotImplementedError) as e:
                response = {
                    "error": {
                        "code": -999,
                        "message": str(e)
                    },
                    "id": req.media.get('id')
                }
            except (InvalidScaleTypeValueException, RemainingScaleBytesNotEmptyException) as e:
                response = {
                    "error": {
                        "code": -998,
                        "message": "Decoding error, given SCALE-value or type registry might be invalid "
                    },
                    "id": req.media.get('id')
                }

            resp.media = response
def start_harvester(self, check_gaps=False, shard=None):
    """Harvest up to 10 new blocks for one shard and persist them.

    The shard name is read from the Celery task args and selects the
    substrate endpoint from SHARDS_TABLE.

    Returns a dict describing the synchronisation outcome.
    Raises HarvesterNotshardParamsError when no shard was supplied and
    HarvesterCouldNotAddBlock when adding a block fails unexpectedly.
    """
    # NOTE(review): the keyword argument `shard` is immediately overridden by
    # the Celery task args — confirm this is intended before relying on it.
    shard = self.request.args[0]
    if shard is None:
        raise HarvesterNotshardParamsError(
            'params shard is missing.. stopping harvester ')

    print("start_harvester")

    substrate_url = SHARDS_TABLE[shard]
    print('== start_harvester substrate_url {} =='.format(substrate_url))
    substrate = SubstrateInterface(substrate_url)

    # Wait until the init task has stored block #1 for all four shards.
    n = Block.query(self.session).filter_by(bid=1).count()
    if n < 4:
        print('waiting init task completed! count().n: {} '.format(n))
        return {'result': 'waiting init task completed! '}

    block_sets = []
    harvester = PolkascanHarvesterService(self.session, type_registry=TYPE_REGISTRY)
    harvester.metadata_store = self.metadata_store

    start_block_hash = substrate.get_chain_head()
    end_block_hash = None
    r = 10  # harvest at most 10 blocks per run
    block_nr = substrate.get_block_number(start_block_hash)

    max_block = Block.query(self.session).filter_by(
        shard_num=shard.split(".")[1]).order_by(Block.bid.desc()).first()
    print('start block_nr {} =='.format(block_nr))
    print('start max_block {} =='.format(max_block.bid))

    # Shrink the range when fewer than 10 new blocks are available.
    if block_nr - max_block.bid < 10:
        r = block_nr - max_block.bid
        # Bug fix: this log line previously formatted max_block.bid, not r.
        print('current range r: {} =='.format(r))

    try:
        for nr in range(1, r + 1):
            block_hash = substrate.get_block_hash(max_block.bid + nr)
            if harvester.add_block(block_hash, substrate_url):
                print('start_harvester+ Added {} '.format(block_hash))
                self.session.commit()
        # Update persistent metadata store in Celery task
        self.metadata_store = harvester.metadata_store
    except BlockAlreadyAdded:
        print('. Skipped {} '.format(block_hash))
    except IntegrityError as e:
        print('. Skipped duplicate {}=={} '.format(block_hash, e))
    except Exception as exc:
        print('! ERROR adding {}'.format(block_hash))
        raise HarvesterCouldNotAddBlock(block_hash) from exc

    block_sets.append({
        'start_block_hash': start_block_hash,
        'end_block_hash': end_block_hash
    })

    # Bug fix: the returned dict previously contained the key 'result' twice;
    # the first value was silently discarded. Keep only the informative one.
    return {
        'result': 'Synch data from {} to {} blocks check by shardnum of {}'.format(
            max_block.bid + 1, r + max_block.bid + 1, shard),
        'block_sets': block_sets
    }
def accumulate_block_recursive(self, block_hash, end_block_hash=None, substrate_url=None):
    """Walk the chain backwards from block_hash, adding blocks for one shard.

    Follows parent hashes until end_block_hash (or the genesis block, bid 0)
    is reached, then runs the shard-genesis processing on the final block.
    Returns a summary dict with the number of blocks added.
    """
    print(
        'start accumulate_block_recursive block_hash {} =='.format(block_hash))
    print('start accumulate_block_recursive substrate_url {} =='.format(
        substrate_url))

    harvester = PolkascanHarvesterService(self.session, type_registry=TYPE_REGISTRY)
    harvester.metadata_store = self.metadata_store

    # If metadata store isn't initialized yet, perform some tests
    if not substrate_url:
        # Nothing to do without an endpoint to query.
        return
    # shard_num = NUM[substrate_url]
    # print('start accumulate_block_recursive shard_num {} =='.format(shard_num))

    substrate = SubstrateInterface(substrate_url)
    block_nr = substrate.get_block_number(block_hash)

    block = None
    max_sequenced_block_id = False
    block_one = None
    add_count = 0

    try:
        # Iterate at most block_nr+1 times: worst case walks all the way
        # back to the shard's genesis block.
        for nr in range(0, block_nr + 1):
            if not block or block.bid > 0:
                # Process block
                blocka = harvester.add_block(block_hash, substrate_url)
                if blocka:
                    print('+ Added {} '.format(block_hash))
                    add_count += 1
                    self.session.commit()
                    block = blocka
                # NOTE(review): if add_block returned a falsy value on the
                # first iteration, `block` is still None here and the access
                # below would raise AttributeError — confirm intended.
                # Break loop if targeted end block hash is reached
                if block_hash == end_block_hash or block.bid == 0:
                    block_one = block
                    break
                # Continue with parent block hash
                block_hash = block.parent_hash

        # Update persistent metadata store in Celery task
        self.metadata_store = harvester.metadata_store
        harvester.process_shard_genesis(block_one, substrate_url)

        # if block_hash != end_block_hash and block and block.bid > 0:
        #     accumulate_block_recursive.delay(block.parent_hash, end_block_hash)

    except BlockAlreadyAdded as e:
        print('. Skipped {} '.format(block_hash))
    except IntegrityError as e:
        print('. Skipped duplicate {}=={} '.format(block_hash, e))
    except Exception as exc:
        print('! ERROR adding {}'.format(block_hash))
        raise HarvesterCouldNotAddBlock(block_hash) from exc

    return {
        'result': '{} blocks added'.format(add_count),
        'lastAddedBlockHash': block_hash,
        'sequencerStartedFrom': max_sequenced_block_id
    }
def integrity_checks(self):
    """Verify the stored chain is gapless and consistent up to the finalized head.

    Walks stored blocks in chunks, checking that ids are consecutive and each
    block's parent_hash matches the previous block's hash. On a fork, reorg
    processing is triggered and both blocks are re-added from the node. The
    verified position is persisted as the INTEGRITY_HEAD status.

    Returns a dict with the final integrity head.
    Raises BlockIntegrityError on a missing block, on a failed hash check, or
    when the chain is not harvested from genesis yet.
    """
    # 1. Check finalized head
    substrate = SubstrateInterface(settings.SUBSTRATE_RPC_URL)

    if settings.FINALIZATION_BY_BLOCK_CONFIRMATIONS > 0:
        # Finality is approximated by N confirmations below the chain head.
        finalized_block_hash = substrate.get_chain_head()
        finalized_block_number = max(
            substrate.get_block_number(finalized_block_hash) - settings.FINALIZATION_BY_BLOCK_CONFIRMATIONS, 0
        )
    else:
        finalized_block_hash = substrate.get_chain_finalised_head()
        finalized_block_number = substrate.get_block_number(finalized_block_hash)

    # 2. Check integrity head
    integrity_head = Status.get_status(self.db_session, 'INTEGRITY_HEAD')

    if not integrity_head.value:
        # Only continue if block #1 exists
        if Block.query(self.db_session).filter_by(id=1).count() == 0:
            raise BlockIntegrityError('Chain not at genesis')
        integrity_head.value = 0
    else:
        integrity_head.value = int(integrity_head.value)

    start_block_id = max(integrity_head.value - 1, 0)
    end_block_id = finalized_block_number
    chunk_size = 1000
    parent_block = None

    if start_block_id < end_block_id:
        # Continue integrity check
        # print('== Start integrity checks from {} to {} =='.format(start_block_id, end_block_id))

        for block_nr in range(start_block_id, end_block_id, chunk_size):
            # TODO replace limit with filter_by block range
            block_range = Block.query(self.db_session).order_by('id')[block_nr:block_nr + chunk_size]
            for block in block_range:
                if parent_block:
                    if block.id != parent_block.id + 1:
                        # Gap detected: a block id is missing in storage.
                        # Save integrity head if block hash of parent matches with hash in node
                        if parent_block.hash == substrate.get_block_hash(integrity_head.value):
                            integrity_head.save(self.db_session)
                            self.db_session.commit()
                        raise BlockIntegrityError('Block #{} is missing.. stopping check '.format(parent_block.id + 1))
                    elif block.parent_hash != parent_block.hash:
                        # Fork detected: reorg both blocks and re-add them
                        # from the node's canonical chain.
                        self.process_reorg_block(parent_block)
                        self.process_reorg_block(block)

                        self.remove_block(block.hash)
                        self.remove_block(parent_block.hash)
                        self.db_session.commit()

                        self.add_block(substrate.get_block_hash(block.id))
                        self.add_block(substrate.get_block_hash(parent_block.id))
                        self.db_session.commit()

                        # Rewind the integrity head below the replaced blocks.
                        integrity_head.value = parent_block.id - 1

                        # Save integrity head if block hash of parent matches with hash in node
                        #if parent_block.parent_hash == substrate.get_block_hash(integrity_head.value):
                        integrity_head.save(self.db_session)
                        self.db_session.commit()

                        raise BlockIntegrityError('Block #{} failed integrity checks, Re-adding #{}.. '.format(parent_block.id, block.id))
                    else:
                        # Chain link is consistent: advance the head.
                        integrity_head.value = block.id

                parent_block = block

                if block.id == end_block_id:
                    break

    if parent_block:
        if parent_block.hash == substrate.get_block_hash(int(integrity_head.value)):
            integrity_head.save(self.db_session)
            self.db_session.commit()

    return {'integrity_head': integrity_head.value}
def accumulate_block_recursive(self, block_hash, end_block_hash=None):
    """Harvest up to 10 blocks backwards from block_hash, then re-queue itself.

    On the very first run (empty metadata store and no stored blocks) it
    spawns extra Celery tasks at intermediate entry points to parallelize
    the initial accumulation. Returns a summary dict of blocks added.
    """
    harvester = PolkascanHarvesterService(self.session, type_registry=TYPE_REGISTRY)
    harvester.metadata_store = self.metadata_store

    # If metadata store isn't initialized yet, perform some tests
    if not harvester.metadata_store:
        print('Init: create entrypoints')
        # Check if blocks exists
        max_block_id = self.session.query(func.max(Block.id)).one()[0]

        if not max_block_id:
            # Speed up accumulating by creating several entry points
            substrate = SubstrateInterface(SUBSTRATE_RPC_URL)
            block_nr = substrate.get_block_number(block_hash)
            if block_nr > 100:
                # Quarter-points of the chain, excluding the two endpoints.
                for entry_point in range(0, block_nr, block_nr // 4)[1:-1]:
                    entry_point_hash = substrate.get_block_hash(entry_point)
                    accumulate_block_recursive.delay(entry_point_hash)

    block = None
    max_sequenced_block_id = False
    add_count = 0

    try:
        # Process at most 10 blocks per task invocation.
        for nr in range(0, 10):
            if not block or block.id > 0:
                # Process block
                block = harvester.add_block(block_hash)
                print('+ Added {} '.format(block_hash))
                add_count += 1
                self.session.commit()

            # Break loop if targeted end block hash is reached
            if block_hash == end_block_hash or block.id == 0:
                break

            # Continue with parent block hash
            block_hash = block.parent_hash

        # Update persistent metadata store in Celery task
        self.metadata_store = harvester.metadata_store

        # Re-queue to keep walking towards end_block_hash.
        if block_hash != end_block_hash and block and block.id > 0:
            accumulate_block_recursive.delay(block.parent_hash, end_block_hash)

    except BlockAlreadyAdded as e:
        # Reached already-harvested territory: hand over to the sequencer.
        print('. Skipped {} '.format(block_hash))
        start_sequencer.delay()
    except IntegrityError as e:
        print('. Skipped duplicate {} '.format(block_hash))
    except Exception as exc:
        print('! ERROR adding {}'.format(block_hash))
        raise HarvesterCouldNotAddBlock(block_hash) from exc

    return {
        'result': '{} blocks added'.format(add_count),
        'lastAddedBlockHash': block_hash,
        'sequencerStartedFrom': max_sequenced_block_id
    }
class Subtensor:
    """ Handles interactions with the subtensor chain.

    Wraps a py-substrate-interface connection and exposes configuration,
    endpoint selection, and (in the methods below) extrinsic submission and
    chain-state queries.
    """

    # Custom substrate type registry for the subtensor runtime: describes the
    # NeuronMetadataOf struct so SCALE decoding of neuron records works.
    custom_type_registry = {
        "runtime_id": 2,
        "types": {
            "NeuronMetadataOf": {
                "type": "struct",
                "type_mapping": [
                    ["ip", "u128"],
                    ["port", "u16"],
                    ["ip_type", "u8"],
                    ["uid", "u64"],
                    ["modality", "u8"],
                    ["hotkey", "AccountId"],
                    ["coldkey", "AccountId"]
                ]
            }
        }
    }

    def __init__(self, config: 'Munch' = None, network: str = None, chain_endpoint: str = None):
        r""" Initializes a subtensor chain interface.
        Args:
            config (:obj:`Munch`, `optional`):
                metagraph.Metagraph.config()
            network (default='akira', type=str)
                The subtensor network flag. The likely choices are:
                        -- akira (testing network)
                        -- kusanagi (main network)
                If this option is set it overloads subtensor.chain_endpoint with
                an entry point node from that network.
            chain_endpoint (default=None, type=str)
                The subtensor endpoint flag. If set, overrides the --network flag.
        """
        # FIX: identity comparison for None (was `config == None`).
        if config is None:
            config = Subtensor.default_config()
        # Explicit constructor arguments take precedence over the config.
        config.subtensor.network = network if network is not None else config.subtensor.network
        config.subtensor.chain_endpoint = chain_endpoint if chain_endpoint is not None else config.subtensor.chain_endpoint
        Subtensor.check_config(config)
        self.config = copy.deepcopy(config)

        if not chain_endpoint:
            # Hard-coded default node used when no endpoint was supplied.
            chain_endpoint = "ws://subtensor.rawatech.com:9944"
            # chain_endpoint = "ws://feynman.kusanagi.bittensor.com:9944"
        elif not chain_endpoint.startswith(("ws://", "wss://")):
            # FIX: only prepend the websocket scheme when it is missing.
            # The previous code unconditionally prefixed "ws://", producing
            # invalid URLs like "ws://ws://host:9944" for callers that
            # already passed a fully-qualified endpoint.
            chain_endpoint = "ws://" + chain_endpoint

        self.substrate = SubstrateInterface(
            ss58_format=42,
            type_registry_preset='substrate-node-template',
            type_registry=self.custom_type_registry,
            url=chain_endpoint)

    @staticmethod
    def default_config() -> Munch:
        """Parse an empty argument set and return this object's default config Munch."""
        parser = argparse.ArgumentParser()
        Subtensor.add_args(parser)
        config = bittensor.config.Config.to_config(parser)
        return config

    @staticmethod
    def add_args(parser: argparse.ArgumentParser):
        """Register the --subtensor.* command line flags on `parser`.

        Safe to call more than once on the same parser: a duplicate
        registration is ignored.
        """
        try:
            parser.add_argument(
                '--subtensor.network',
                default='kusanagi',
                type=str,
                help='''The subtensor network flag. 
                        The likely choices are:
                                -- akira (testing network)
                                -- kusanagi (main network)
                        If this option is set it overloads subtensor.chain_endpoint with
                        an entry point node from that network.
                        ''')
            parser.add_argument(
                '--subtensor.chain_endpoint',
                default=None,
                type=str,
                help='''The subtensor endpoint flag. If set, overrides the --network flag.
                        ''')
        except argparse.ArgumentError:
            # FIX: was a bare `except:` that swallowed every error. Only the
            # "argument already registered" error (raised when add_args runs
            # twice on one parser) is expected and safe to ignore.
            pass

    @staticmethod
    def check_config(config: Munch):
        # No validation currently required for subtensor config.
        pass

    def endpoint_for_network(self, blacklist: List[str] = []) -> str:
        r""" Returns a chain endpoint based on config.subtensor.network.
        Args:
            blacklist (List[str]):
                Endpoints to exclude (e.g. ones that already failed to connect).
        Returns:
            endpoint (str):
                Websocket endpoint, or None if there are none available.
        """
        # Chain endpoint overrides the --network flag.
        if self.config.subtensor.chain_endpoint is not None:
            if self.config.subtensor.chain_endpoint in blacklist:
                return None
            return self.config.subtensor.chain_endpoint

        def _choose(entrypoints):
            # Pick a random non-blacklisted entry point; None when exhausted.
            available = [item for item in entrypoints if item not in blacklist]
            if len(available) == 0:
                return None
            return random.choice(available)

        # Else defaults to networks.
        # TODO(const): this should probably make a DNS lookup.
        # NOTE: each branch accesses its entrypoint list lazily so that a
        # missing bittensor.__*_entrypoints__ attribute only matters for the
        # network actually selected (matches the original behavior).
        network = self.config.subtensor.network
        if network == "akira":
            return _choose(bittensor.__akira_entrypoints__)
        elif network == "boltzmann":
            return _choose(bittensor.__boltzmann_entrypoints__)
        elif network == "kusanagi":
            return _choose(bittensor.__kusanagi_entrypoints__)
        elif network == "local":
            return _choose(bittensor.__local_entrypoints__)
        else:
            # Unknown network names fall back to the akira entry points.
            return _choose(bittensor.__akira_entrypoints__)

    # def is_connected(self) -> bool:
    #     r""" Returns true if the connection state as a boolean.
    #     Raises:
    #         success (bool):
    #             True is the websocket is connected to the chain endpoint.
    #     """
    #     loop = asyncio.get_event_loop()
    #     loop.set_debug(enabled=True)
    #     return loop.run_until_complete(self.async_is_connected())

    # async def async_is_connected(self) -> bool:
    #     r""" Returns the connection state as a boolean.
    #     Raises:
    #         success (bool):
    #             True is the websocket is connected to the chain endpoint.
    #     """
    #     return self.substrate.async_is_connected()

    # def check_connection(self) -> bool:
    #     r""" Checks if substrate_old websocket backend is connected, connects if it is not.
    #     """
    #     loop = asyncio.get_event_loop()
    #     loop.set_debug(enabled=True)
    #     return loop.run_until_complete(self.async_check_connection())

    # async def async_check_connection(self) -> bool:
    #     r""" Checks if substrate_old websocket backend is connected, connects if it is not.
    #     """
    #     if not self.async_is_connected():
    #         return self.async_connect()
    #     return True

    # def connect( self, timeout: int = 10, failure = True ) -> bool:
    #     r""" Attempts to connect the substrate_old interface backend.
    #     If the connection fails, attemps another endpoint until a timeout.
    #     Args:
    #         timeout (int):
    #             Time to wait before subscription times out.
    #         failure (bool):
    #             This connection attempt raises an error an a failed attempt.
    #     Returns:
    #         success (bool):
    #             True on success.
    #     """
    #     loop = asyncio.get_event_loop()
    #     loop.set_debug(enabled=True)
    #     return loop.run_until_complete(self.async_connect(timeout, failure))

    # async def async_connect( self, timeout: int = 10, failure = True ) -> bool:
    #     r""" Attempts to connect the substrate_old interface backend.
    #     If the connection fails, attemps another endpoint until a timeout.
    #     Args:
    #         timeout (int):
    #             Time to wait before subscription times out.
# failure (bool): # This connection attempt raises an error an a failed attempt. # Returns: # success (bool): # True on success. # """ # start_time = time.time() # attempted_endpoints = [] # while True: # def connection_error_message(): # print(''' # Check that your internet connection is working and the chain endpoints are available: <cyan>{}</cyan> # The subtensor.network should likely be one of the following choices: # -- local - (your locally running node) # -- akira - (testnet) # -- kusanagi - (mainnet) # Or you may set the endpoint manually using the --subtensor.chain_endpoint flag # To run a local node (See: docs/running_a_validator.md) \n # '''.format( attempted_endpoints) ) # # # ---- Get next endpoint ---- # ws_chain_endpoint = self.endpoint_for_network( blacklist = attempted_endpoints ) # if ws_chain_endpoint == None: # logger.error("No more endpoints available for subtensor.network: <cyan>{}</cyan>, attempted: <cyan>{}</cyan>".format(self.config.subtensor.network, attempted_endpoints)) # connection_error_message() # if failure: # logger.critical('Unable to connect to network:<cyan>{}</cyan>.\nMake sure your internet connection is stable and the network is properly set.'.format(self.config.subtensor.network)) # else: # return False # attempted_endpoints.append(ws_chain_endpoint) # # # --- Attempt connection ---- # if self.substrate.async_connect( ws_chain_endpoint, timeout = 5 ): # logger.success("Connected to network:<cyan>{}</cyan> at endpoint:<cyan>{}</cyan>".format(self.config.subtensor.network, ws_chain_endpoint)) # return True # # # ---- Timeout ---- # elif (time.time() - start_time) > timeout: # logger.error( "Error while connecting to network:<cyan>{}</cyan> at endpoint: <cyan>{}</cyan>".format(self.config.subtensor.network, ws_chain_endpoint)) # connection_error_message() # if failure: # raise RuntimeError('Unable to connect to network:<cyan>{}</cyan>.\nMake sure your internet connection is stable and the network is properly 
set.'.format(self.config.subtensor.network)) # else: # return False # async def _submit_and_check_extrinsic( # self, # extrinsic, # wait_for_inclusion:bool = False, # wait_for_finalization: bool = False, # timeout: int = bittensor.__blocktime__ * 3 # ) -> bool: # r""" Makes an extrinsic call to the chain, returns true if the extrinsic send was a success. # If wait_for_inclusion or wait_for_finalization are true, the call will return true iff the # extrinsic enters or finalizes in a block. # Args: # extrinsic (substrate_old extrinsic): # Extrinsic to send to the chain. # wait_for_inclusion (bool): # If set, waits for the extrinsic to enter a block before returning true, # or returns false if the extrinsic fails to enter the block within the timeout. # wait_for_finalization (bool): # If set, waits for the extrinsic to be finalized on the chain before returning true, # or returns false if the extrinsic fails to be finalized within the timeout. # timeout (int): # Time that this call waits for either finalization of inclusion. # Returns: # success (bool): # flag is true if extrinsic was finalized or uncluded in the block. # If we did not wait for finalization / inclusion, the response is true. # """ # # Send extrinsic # try: # response = self.substrate.submit_extrinsic( # extrinsic, # wait_for_inclusion = wait_for_inclusion, # wait_for_finalization = wait_for_finalization, # timeout = timeout # ) # except SubstrateRequestException as e: # logger.error('Extrinsic exception with error {}', e) # return False # except Exception as e: # logger.error('Error submitting extrinsic with error {}', e) # return False # # # Check timeout. # if response == None: # logger.error('Error in extrinsic: No response within timeout') # return False # # # Check result. 
    # if not wait_for_inclusion and not wait_for_finalization:
    #     return True
    # else:
    #     if 'error' in response:
    #         logger.error('Error in extrinsic: {}', response['error'])
    #     elif 'finalized' in response and response['finalized'] == True:
    #         return True
    #     elif 'inBlock' in response and response['inBlock'] == True:
    #         return True
    #     else:
    #         return False

    def is_subscribed(self, wallet: 'bittensor.wallet.Wallet', ip: str, port: int, modality: int) -> bool:
        r""" Returns true if the bittensor endpoint is already subscribed with the wallet and metadata.
        Args:
            wallet (bittensor.wallet.Wallet):
                bittensor wallet object.
            ip (str):
                endpoint host port i.e. 192.122.31.4
            port (int):
                endpoint port number i.e. 9221
            modality (int):
                int encoded endpoint modality i.e 0 for TEXT
        """
        # NOTE(review): `modality` is accepted but never compared against the
        # on-chain record below — confirm whether it should be checked too.
        uid = self.get_uid_for_pubkey(wallet.hotkey.public_key)
        if uid is None:
            # Hotkey has no uid on chain, so it cannot be subscribed.
            return False

        neuron = self.get_neuron_for_uid(uid)
        # Subscribed iff the stored ip (as int) and port match this endpoint.
        if neuron['ip'] == net.ip_to_int(ip) and neuron['port'] == port:
            return True
        else:
            return False

    def subscribe(
            self,
            wallet: 'bittensor.wallet.Wallet',
            ip: str,
            port: int,
            modality: int,
            wait_for_inclusion: bool = False,
            wait_for_finalization=True,
            timeout: int = 3 * bittensor.__blocktime__,
    ) -> bool:
        r""" Subscribes a bittensor endpoint to the subtensor chain.
        Args:
            wallet (bittensor.wallet.Wallet):
                bittensor wallet object.
            ip (str):
                endpoint host port i.e. 192.122.31.4
            port (int):
                endpoint port number i.e. 9221
            modality (int):
                int encoded endpoint modality i.e 0 for TEXT
            wait_for_inclusion (bool):
                if set, waits for the extrinsic to enter a block before returning true,
                or returns false if the extrinsic fails to enter the block within the timeout.
            wait_for_finalization (bool):
                if set, waits for the extrinsic to be finalized on the chain before returning true,
                or returns false if the extrinsic fails to be finalized within the timeout.
            timeout (int):
                time that this call waits for either finalization of inclusion.
                NOTE(review): not forwarded to submit_extrinsic below — confirm intent.
        Returns:
            success (bool):
                flag is true if extrinsic was finalized or included in the block.
                If we did not wait for finalization / inclusion, the response is true.
        """
        # Idempotence guard: skip the extrinsic if already subscribed.
        if self.is_subscribed(wallet, ip, port, modality):
            logger.success(
                "Already subscribed with:\n<cyan>[\n ip: {},\n port: {},\n modality: {},\n hotkey: {},\n coldkey: {}\n]</cyan>"
                .format(ip, port, modality, wallet.hotkey.public_key,
                        wallet.coldkeypub))
            return True

        ip_as_int = net.ip_to_int(ip)
        # Call parameters for SubtensorModule.subscribe; keys must match the
        # runtime's expected argument names exactly.
        params = {
            'ip': ip_as_int,
            'port': port,
            'ip_type': 4,
            'modality': modality,
            'coldkey': wallet.coldkeypub,
        }
        call = self.substrate.compose_call(call_module='SubtensorModule',
                                           call_function='subscribe',
                                           call_params=params)
        # TODO (const): hotkey should be an argument here not assumed. Either that or the coldkey pub should also be assumed.
        extrinsic = self.substrate.create_signed_extrinsic(
            call=call, keypair=wallet.hotkey)
        result = self.substrate.submit_extrinsic(
            extrinsic, wait_for_inclusion, wait_for_finalization).is_success
        if result:
            logger.success(
                "Successfully subscribed with:\n<cyan>[\n ip: {},\n port: {},\n modality: {},\n hotkey: {},\n coldkey: {}\n]</cyan>"
                .format(ip, port, modality, wallet.hotkey.public_key,
                        wallet.coldkeypub))
        else:
            logger.error("Failed to subscribe")
        return result

    def add_stake(
            self,
            wallet: 'bittensor.wallet.Wallet',
            amount: Balance,
            hotkey_id: int,
            wait_for_inclusion: bool = False,
            wait_for_finalization: bool = False,
            timeout: int = 3 * bittensor.__blocktime__,
    ) -> bool:
        r""" Adds the specified amount of stake to passed hotkey uid.
        Args:
            wallet (bittensor.wallet.Wallet):
                bittensor wallet object.
            amount (bittensor.utils.balance.Balance):
                amount to stake as bittensor balance
            hotkey_id (int):
                uid of hotkey to stake into.
            wait_for_inclusion (bool):
                if set, waits for the extrinsic to enter a block before returning true,
                or returns false if the extrinsic fails to enter the block within the timeout.
            wait_for_finalization (bool):
                if set, waits for the extrinsic to be finalized on the chain before returning true,
                or returns false if the extrinsic fails to be finalized within the timeout.
            timeout (int):
                time that this call waits for either finalization of inclusion.
                NOTE(review): not forwarded to submit_extrinsic below — confirm intent.
        Returns:
            success (bool):
                flag is true if extrinsic was finalized or included in the block.
                If we did not wait for finalization / inclusion, the response is true.
        """
        # NOTE: 'ammount_staked' spelling matches the chain-side call
        # parameter name; do not "fix" it here.
        call = self.substrate.compose_call(call_module='SubtensorModule',
                                           call_function='add_stake',
                                           call_params={
                                               'hotkey': hotkey_id,
                                               'ammount_staked': amount.rao
                                           })
        # Staking is signed with the coldkey (funds owner).
        extrinsic = self.substrate.create_signed_extrinsic(
            call=call, keypair=wallet.coldkey)
        return self.substrate.submit_extrinsic(
            extrinsic, wait_for_inclusion, wait_for_finalization).is_success

    def transfer(
            self,
            wallet: 'bittensor.wallet.Wallet',
            dest: str,
            amount: Balance,
            wait_for_inclusion: bool = False,
            wait_for_finalization: bool = False,
            timeout: int = 3 * bittensor.__blocktime__,
    ) -> bool:
        r""" Transfers funds from this wallet to the destination public key address
        Args:
            wallet (bittensor.wallet.Wallet):
                bittensor wallet object.
            dest (str):
                destination public key address of receiver.
            amount (bittensor.utils.balance.Balance):
                amount to stake as bittensor balance
            wait_for_inclusion (bool):
                if set, waits for the extrinsic to enter a block before returning true,
                or returns false if the extrinsic fails to enter the block within the timeout.
            wait_for_finalization (bool):
                if set, waits for the extrinsic to be finalized on the chain before returning true,
                or returns false if the extrinsic fails to be finalized within the timeout.
            timeout (int):
                time that this call waits for either finalization of inclusion.
                NOTE(review): not forwarded to submit_extrinsic below — confirm intent.
        Returns:
            success (bool):
                flag is true if extrinsic was finalized or included in the block.
                If we did not wait for finalization / inclusion, the response is true.
        """
        call = self.substrate.compose_call(call_module='Balances',
                                           call_function='transfer',
                                           call_params={
                                               'dest': dest,
                                               'value': amount.rao
                                           })
        extrinsic = self.substrate.create_signed_extrinsic(
            call=call, keypair=wallet.coldkey)
        return self.substrate.submit_extrinsic(
            extrinsic, wait_for_inclusion, wait_for_finalization).is_success

    def unstake(
            self,
            wallet: 'bittensor.wallet.Wallet',
            amount: Balance,
            hotkey_id: int,
            wait_for_inclusion: bool = False,
            wait_for_finalization: bool = False,
            timeout: int = 3 * bittensor.__blocktime__,
    ) -> bool:
        r""" Removes stake into the wallet coldkey from the specified hotkey uid.
        Args:
            wallet (bittensor.wallet.Wallet):
                bittensor wallet object.
            amount (bittensor.utils.balance.Balance):
                amount to stake as bittensor balance
            hotkey_id (int):
                uid of hotkey to unstake from.
            wait_for_inclusion (bool):
                if set, waits for the extrinsic to enter a block before returning true,
                or returns false if the extrinsic fails to enter the block within the timeout.
            wait_for_finalization (bool):
                if set, waits for the extrinsic to be finalized on the chain before returning true,
                or returns false if the extrinsic fails to be finalized within the timeout.
            timeout (int):
                time that this call waits for either finalization of inclusion.
                NOTE(review): not forwarded to submit_extrinsic below — confirm intent.
        Returns:
            success (bool):
                flag is true if extrinsic was finalized or included in the block.
                If we did not wait for finalization / inclusion, the response is true.
        """
        # NOTE: 'ammount_unstaked' spelling matches the chain-side call
        # parameter name; do not "fix" it here.
        call = self.substrate.compose_call(call_module='SubtensorModule',
                                           call_function='remove_stake',
                                           call_params={
                                               'ammount_unstaked': amount.rao,
                                               'hotkey': hotkey_id
                                           })
        extrinsic = self.substrate.create_signed_extrinsic(
            call=call, keypair=wallet.coldkey)
        return self.substrate.submit_extrinsic(
            extrinsic, wait_for_inclusion, wait_for_finalization).is_success

    def set_weights(self,
                    wallet: 'bittensor.wallet.Wallet',
                    destinations,
                    values,
                    wait_for_inclusion: bool = False,
                    wait_for_finalization: bool = False,
                    timeout: int = 3 * bittensor.__blocktime__) -> bool:
        r""" Sets the given weights and values on chain for wallet hotkey account.
        Args:
            wallet (bittensor.wallet.Wallet):
                bittensor wallet object.
            destinations (List[int]):
                uint64 uids of destination neurons.
            values (List[int]):
                u32 max encoded floating point weights.
            wait_for_inclusion (bool):
                if set, waits for the extrinsic to enter a block before returning true,
                or returns false if the extrinsic fails to enter the block within the timeout.
            wait_for_finalization (bool):
                if set, waits for the extrinsic to be finalized on the chain before returning true,
                or returns false if the extrinsic fails to be finalized within the timeout.
            timeout (int):
                time that this call waits for either finalization of inclusion.
                NOTE(review): not forwarded to submit_extrinsic below — confirm intent.
        Returns:
            success (bool):
                flag is true if extrinsic was finalized or included in the block.
                If we did not wait for finalization / inclusion, the response is true.
        """
        call = self.substrate.compose_call(call_module='SubtensorModule',
                                           call_function='set_weights',
                                           call_params={
                                               'dests': destinations,
                                               'weights': values
                                           })
        # Weights are signed with the hotkey (the key doing the work).
        extrinsic = self.substrate.create_signed_extrinsic(
            call=call, keypair=wallet.hotkey)
        return self.substrate.submit_extrinsic(
            extrinsic,
            wait_for_inclusion=wait_for_inclusion,
            wait_for_finalization=wait_for_finalization).is_success

    def get_balance(self, address: str) -> Balance:
        r""" Returns the token balance for the passed ss58_address address
        Args:
            address (Substrate address format, default = 42):
                ss58 chain address.
        Return:
            balance (bittensor.utils.balance.Balance):
                account balance
        """
        result = self.substrate.get_runtime_state(module='System',
                                                  storage_function='Account',
                                                  params=[address],
                                                  block_hash=None)
        balance_info = result.get('result')
        if not balance_info:
            # Unknown account: treat as a zero balance.
            return Balance(0)
        balance = balance_info['data']['free']
        return Balance(balance)

    def get_current_block(self) -> int:
        r""" Returns the current block number on the chain.
        Returns:
            block_number (int):
                Current chain blocknumber.
        """
        # A None block hash queries the chain head.
        return self.substrate.get_block_number(None)

    def get_block_hash(self, block_nr):
        # Returns the block hash for the given block number.
        return self.substrate.get_block_hash(block_nr)

    def get_active(self) -> List[Tuple[str, int]]:
        r""" Returns a list of (public key, uid) pairs one for each active peer on chain.
        Returns:
            active (List[Tuple[str, int]]):
                List of active peers.
        """
        result = self.substrate.iterate_map(
            module='SubtensorModule',
            storage_function='Active',
        )
        return result

    def get_stake(self, hash=None) -> List[Tuple[int, int]]:
        r""" Returns a list of (uid, stake) pairs one for each active peer on chain.
        Args:
            hash: optional block hash to query at (None = chain head).
                NOTE(review): parameter name shadows the builtin `hash`;
                kept for caller compatibility.
        Returns:
            stake (List[Tuple[int, int]]):
                List of stake values.
        """
        result = self.substrate.iterate_map(module='SubtensorModule',
                                            storage_function='Stake',
                                            block_hash=hash)
        return result

    def get_last_emit(self, hash=None) -> List[Tuple[int, int]]:
        r""" Returns a list of (uid, last emit) pairs for each active peer on chain.
        Args:
            hash: optional block hash to query at (None = chain head).
        Returns:
            last_emit (List[Tuple[int, int]]):
                List of last emit values.
        """
        result = self.substrate.iterate_map(module='SubtensorModule',
                                            storage_function='LastEmit',
                                            block_hash=hash)
        return result

    def get_weight_vals(self, hash=None) -> List[Tuple[int, List[int]]]:
        r""" Returns a list of (uid, weight vals) pairs for each active peer on chain.
        Args:
            hash: optional block hash to query at (None = chain head).
        Returns:
            weight_vals (List[Tuple[int, List[int]]]):
                List of weight val pairs.
        """
        result = self.substrate.iterate_map(module='SubtensorModule',
                                            storage_function='WeightVals',
                                            block_hash=hash)
        return result

    def get_weight_uids(self, hash=None) -> List[Tuple[int, int]]:
        r""" Returns a list of (uid, weight uids) pairs for each active peer on chain.
        Args:
            hash: optional block hash to query at (None = chain head).
        Returns:
            weight_uids (List[Tuple[int, List[int]]]):
                List of weight uid pairs
        """
        result = self.substrate.iterate_map(module='SubtensorModule',
                                            storage_function='WeightUids',
                                            block_hash=hash)
        return result

    def neurons(self, hash=None) -> List[Tuple[int, dict]]:
        r""" Returns a list of neuron from the chain.
        Args:
            hash: optional block hash to query at (None = chain head).
        Returns:
            neuron (List[Tuple[int, dict]]):
                List of neuron objects.
        """
        neurons = self.substrate.iterate_map(module='SubtensorModule',
                                             storage_function='Neurons',
                                             block_hash=hash)
        return neurons

    # def __convert_neuron(self, data) -> dict:
    #
    #     return dict({
    #         'coldkey': data['coldkey'],
    #         'hotkey': data['hotkey'],
    #         'ip_type': int(data['ip_type']),
    #         'ip': int(data['ip']),
    #         'port': int(data['port']),
    #         'modality': int(data['modality']),
    #         'uid': int(data['uid'])
    #     })

    def get_uid_for_pubkey(self, pubkey=str) -> Optional[int]:
        """ Returns the uid of the peer given passed public key string.
        Args:
            pubkey (str):
                String encoded public key.
                NOTE(review): `pubkey=str` makes the *type object* `str` the
                default value — almost certainly a typo for the annotation
                `pubkey: str`. Kept as-is to avoid an interface change;
                callers must always pass a pubkey explicitly.
        Returns:
            uid (int):
                uid of peer with hotkey equal to passed public key.
        """
        result = self.substrate.get_runtime_state(module='SubtensorModule',
                                                  storage_function='Active',
                                                  params=[pubkey])

        if result['result'] is None:
            return None
        return int(result['result'])

    def get_neuron_for_uid(self, uid) -> dict:
        """ Returns the neuron metadata of the peer with the passed uid.
        Args:
            uid (int):
                Uid to query for metadata.
        Returns:
            metadata (Dict):
                Dict in list form containing metadata of associated uid.
        """
        result = self.substrate.get_runtime_state(module='SubtensorModule',
                                                  storage_function='Neurons',
                                                  params=[uid])
        return result['result']

    def get_stake_for_uid(self, uid) -> Balance:
        r""" Returns the staked token amount of the peer with the passed uid.
        Args:
            uid (int):
                Uid to query for metadata.
        Returns:
            stake (int):
                Amount of staked token.
        """
        stake = self.substrate.get_runtime_state(module='SubtensorModule',
                                                 storage_function='Stake',
                                                 params=[uid])
        result = stake['result']
        if not result:
            # No stake entry for this uid: report zero.
            return Balance(0)
        return Balance(result)

    def weight_uids_for_uid(self, uid) -> List[int]:
        r""" Returns the weight uids of the peer with the passed uid.
        Args:
            uid (int):
                Uid to query for metadata.
        Returns:
            weight_uids (List[int]):
                Weight uids for passed uid.
        """
        result = self.substrate.get_runtime_state(
            module='SubtensorModule',
            storage_function='WeightUids',
            params=[uid])
        return result['result']

    def weight_vals_for_uid(self, uid) -> List[int]:
        r""" Returns the weight vals of the peer with the passed uid.
        Args:
            uid (int):
                Uid to query for metadata.
        Returns:
            weight_vals (List[int]):
                Weight vals for passed uid.
        """
        result = self.substrate.get_runtime_state(
            module='SubtensorModule',
            storage_function='WeightVals',
            params=[uid])
        return result['result']

    def get_last_emit_data_for_uid(self, uid) -> int:
        r""" Returns the last emit of the peer with the passed uid.
        Args:
            uid (int):
                Uid to query for metadata.
        Returns:
            last_emit (int):
                Last emit block number
        """
        result = self.substrate.get_runtime_state(module='SubtensorModule',
                                                  storage_function='LastEmit',
                                                  params=[uid])
        return result['result']