def test_iterate_map(self):
    """iterate_map over Staking.Bonded decodes raw storage pairs into
    (stash, controller) key/value lists."""
    pair_key = '0x5f3e4907f716ac89b6347d15ececedca3ed14b45ed20d054f05e37e2542cfe70e535263148daaf49be5ddb1579b72e84524fc29e78609e3caf42e85aa118ebfe0b0ad404b5bdd25f'
    pair_value = '0xd43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d'

    def rpc_stub(method, params):
        # Minimal RPC surface needed by iterate_map: runtime version + raw pairs.
        payloads = {
            'chain_getRuntimeVersion': {"specVersion": 2023},
            'state_getPairs': [[pair_key, pair_value]],
        }
        if method in payloads:
            return {"jsonrpc": "2.0", "result": payloads[method], "id": 1}

    self.substrate.rpc_request = MagicMock(side_effect=rpc_stub)

    decoder = MetadataDecoder(ScaleBytes(metadata_v12_hex))
    decoder.decode()
    self.substrate.get_block_metadata = MagicMock(return_value=decoder)

    bonded_pairs = self.substrate.iterate_map(
        module='Staking',
        storage_function='Bonded',
        block_hash='0x7d56e0ff8d3c57f77ea6a1eeef1cd2c0157a7b24d5a1af0f802ca242617922bf'
    )

    self.assertEqual(bonded_pairs, [[
        '0xbe5ddb1579b72e84524fc29e78609e3caf42e85aa118ebfe0b0ad404b5bdd25f',
        '0xd43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d'
    ]])
def setUpClass(cls):
    """Build one shared SubstrateInterface whose block metadata is the
    pre-decoded v10 fixture (no network access)."""
    decoder = MetadataDecoder(ScaleBytes(metadata_v10_hex))
    decoder.decode()
    interface = SubstrateInterface(
        url='dummy', address_type=2, type_registry_preset='kusama')
    interface.get_block_metadata = MagicMock(return_value=decoder)
    cls.substrate = interface
async def test_plaintype_call(self):
    """Query the plain-type System.Events storage item through the async
    runtime-state API and verify both events decode."""
    events_scale = '0x0800000000000000482d7c0900000000020000000100000000000000000000000000020000'

    def rpc_stub(method, params):
        if method == 'chain_getRuntimeVersion':
            return {"jsonrpc": "2.0", "result": {"specVersion": 2023}, "id": 1}
        if method == 'state_getStorageAt':
            return {"jsonrpc": "2.0", "result": events_scale, "id": 1}

    decoder = MetadataDecoder(ScaleBytes(metadata_v12_hex))
    decoder.decode()
    self.substrate.get_block_metadata = MagicMock(return_value=decoder)
    self.substrate.rpc_request = MagicMock(side_effect=rpc_stub)

    response = await self.substrate.get_runtime_state(
        module='System', storage_function='Events')

    events = response['result']
    self.assertEqual(len(events), 2)
    for event in events:
        self.assertEqual(event['module_id'], 'System')
        self.assertEqual(event['event_id'], 'ExtrinsicSuccess')
def test_maptype_call(self):
    """Query the map-type System.Account storage item and verify the decoded
    AccountInfo structure."""
    header = {
        "digest": {"logs": []},
        "extrinsicsRoot": "0xa94148d938c7b7976abf4272dca95724d7a74da2f3649ec0bd53dc3daaedda44",
        "number": "0x4abaaa",
        "parentHash": "0xe1781813275653a970b4260298b3858b36d38e072256dad674f7c786a0cae236",
        "stateRoot": "0xb6aa468385c82d15b343a676b3488d9f141ac100fc548bb8a546f27a7241c44a"
    }
    account_scale = '0x00000000030000c16ff28623000000000000000000000000000000000000000000000000000000c16ff286230000000000000000000000c16ff28623000000000000000000'

    def rpc_stub(method, params):
        payloads = {
            'chain_getRuntimeVersion': {"specVersion": 2023},
            'state_getStorageAt': account_scale,
            'chain_getHeader': header,
        }
        if method in payloads:
            return {'jsonrpc': '2.0', 'result': payloads[method], 'id': 1}

    self.substrate.rpc_request = MagicMock(side_effect=rpc_stub)
    decoder = MetadataDecoder(ScaleBytes(metadata_v12_hex))
    decoder.decode()
    self.substrate.get_block_metadata = MagicMock(return_value=decoder)

    result = self.substrate.query(
        module='System',
        storage_function='Account',
        params=['5GNJqTPyNqANBkUVMN1LPPrxXnFouWXoe2wNSmmEoLctxiZY'])

    expected = {
        'data': {
            'feeFrozen': 10000000000000000,
            'free': 10000000000000000,
            'miscFrozen': 10000000000000000,
            'reserved': 0
        },
        'nonce': 0,
        'refcount': 3
    }
    self.assertEqual(result.value, expected)
def test_iterate_map(self):
    """iterate_map on Staking.Bonded (query-based interface) returns decoded
    (stash, controller) pairs from the mocked state_getPairs result."""
    pair_key = '0x5f3e4907f716ac89b6347d15ececedca3ed14b45ed20d054f05e37e2542cfe70e535263148daaf49be5ddb1579b72e84524fc29e78609e3caf42e85aa118ebfe0b0ad404b5bdd25f'
    pair_value = '0xd43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d'
    header = {
        "digest": {"logs": []},
        "extrinsicsRoot": "0xa94148d938c7b7976abf4272dca95724d7a74da2f3649ec0bd53dc3daaedda44",
        "number": "0x4abaaa",
        "parentHash": "0xe1781813275653a970b4260298b3858b36d38e072256dad674f7c786a0cae236",
        "stateRoot": "0xb6aa468385c82d15b343a676b3488d9f141ac100fc548bb8a546f27a7241c44a"
    }

    def rpc_stub(method, params):
        payloads = {
            'chain_getRuntimeVersion': {"specVersion": 2023},
            'state_getPairs': [[pair_key, pair_value]],
            'chain_getHeader': header,
        }
        if method in payloads:
            return {"jsonrpc": "2.0", "result": payloads[method], "id": 1}

    self.substrate.rpc_request = MagicMock(side_effect=rpc_stub)
    decoder = MetadataDecoder(ScaleBytes(metadata_v12_hex))
    decoder.decode()
    self.substrate.get_block_metadata = MagicMock(return_value=decoder)

    bonded_pairs = self.substrate.iterate_map(
        module='Staking',
        storage_function='Bonded',
        block_hash='0x7d56e0ff8d3c57f77ea6a1eeef1cd2c0157a7b24d5a1af0f802ca242617922bf'
    )

    self.assertEqual(bonded_pairs, [[
        '0xbe5ddb1579b72e84524fc29e78609e3caf42e85aa118ebfe0b0ad404b5bdd25f',
        '0xd43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d'
    ]])
def setUpClass(cls):
    """Register the default type registry and prepare a SubstrateInterface
    backed by the decoded v10 metadata fixture."""
    RuntimeConfiguration().update_type_registry(
        load_type_registry_preset("default"))
    decoder = MetadataDecoder(ScaleBytes(metadata_v10_hex))
    decoder.decode()
    cls.substrate = SubstrateInterface(url='dummy', address_type=2)
    cls.substrate.get_block_metadata = MagicMock(return_value=decoder)
def test_metadata_v10(self):
    """Every call-argument type appearing in the v10 metadata must resolve to
    a ScaleDecoder class."""
    decoder = MetadataDecoder(ScaleBytes(metadata_v10_hex))
    decoder.decode()
    self.assertEqual(decoder.version.value, "MetadataV10Decoder")

    # Flatten modules -> calls -> args into a single stream of type names.
    arg_types = (
        arg.type
        for module in decoder.metadata.modules if module.calls
        for call in module.calls
        for arg in call.args
    )
    for arg_type in arg_types:
        self.assertIsNotNone(
            ScaleDecoder.get_decoder_class(arg_type),
            msg='{} is not supported by metadata'.format(arg_type))
def test_plaintype_call(self):
    """Query the plain-type System.Events storage item via query() and check
    both decoded events are ExtrinsicSuccess."""
    events_scale = '0x0800000000000000482d7c0900000000020000000100000000000000000000000000020000'
    header = {
        "digest": {"logs": []},
        "extrinsicsRoot": "0xa94148d938c7b7976abf4272dca95724d7a74da2f3649ec0bd53dc3daaedda44",
        "number": "0x4abaaa",
        "parentHash": "0xe1781813275653a970b4260298b3858b36d38e072256dad674f7c786a0cae236",
        "stateRoot": "0xb6aa468385c82d15b343a676b3488d9f141ac100fc548bb8a546f27a7241c44a"
    }

    def rpc_stub(method, params):
        payloads = {
            'chain_getRuntimeVersion': {"specVersion": 2023},
            'state_getStorageAt': events_scale,
            'chain_getHeader': header,
        }
        if method in payloads:
            return {"jsonrpc": "2.0", "result": payloads[method], "id": 1}

    decoder = MetadataDecoder(ScaleBytes(metadata_v12_hex))
    decoder.decode()
    self.substrate.get_block_metadata = MagicMock(return_value=decoder)
    self.substrate.rpc_request = MagicMock(side_effect=rpc_stub)

    result = self.substrate.query(module='System', storage_function='Events')

    self.assertEqual(len(result.value), 2)
    for event in result.value:
        self.assertEqual(event['module_id'], 'System')
        self.assertEqual(event['event_id'], 'ExtrinsicSuccess')
def get_block_metadata(self, block_hash, decode=True):
    """Retrieve the runtime metadata for the given block.

    Args:
        block_hash: Hash of the block to fetch metadata for.
        decode: When True (default), decode the SCALE-encoded metadata and
            return a ``MetadataDecoder``; when False, return the raw RPC
            response dict.

    Returns:
        MetadataDecoder | dict: decoded metadata, or the raw RPC response.

    Raises:
        SubstrateRequestException: if the node returned no usable result.
    """
    response = self.__rpc_request("state_getMetadata", [block_hash])

    if response.get('result'):
        if decode:
            metadata_decoder = MetadataDecoder(ScaleBytes(response.get('result')))
            metadata_decoder.decode()
            return metadata_decoder
        return response

    # Include the node-side error payload (if present) rather than discarding
    # it behind a generic message — makes RPC failures diagnosable.
    raise SubstrateRequestException(
        "Error occurred during retrieval of metadata: {}".format(
            response.get('error', 'empty result')))
def setUpClass(cls):
    """Create three interfaces for the test suite: one fully mocked and
    working, one whose RPC always returns null, one whose RPC always errors.

    The original bound three different stubs under the same name
    `mocked_request`; distinct names are used here for readability.
    """
    cls.substrate = SubstrateInterface(
        url='dummy', ss58_format=42, type_registry_preset='kusama')
    decoder = MetadataDecoder(ScaleBytes(metadata_v12_hex))
    decoder.decode()
    cls.substrate.get_block_metadata = MagicMock(return_value=decoder)

    def working_request(method, params):
        if method == 'chain_getRuntimeVersion':
            return {"jsonrpc": "2.0", "result": {"specVersion": 2023}, "id": 1}
        elif method == 'chain_getHeader':
            return {
                "jsonrpc": "2.0",
                "result": {
                    "digest": {"logs": []},
                    "extrinsicsRoot": "0xa94148d938c7b7976abf4272dca95724d7a74da2f3649ec0bd53dc3daaedda44",
                    "number": "0x4abaaa",
                    "parentHash": "0xe1781813275653a970b4260298b3858b36d38e072256dad674f7c786a0cae236",
                    "stateRoot": "0xb6aa468385c82d15b343a676b3488d9f141ac100fc548bb8a546f27a7241c44a"
                },
                "id": 1
            }

    cls.substrate.rpc_request = MagicMock(side_effect=working_request)

    cls.empty_substrate = SubstrateInterface(
        url='dummy', ss58_format=42, type_registry_preset='kusama')

    def null_request(method, params):
        return {'jsonrpc': '2.0', 'result': None, 'id': 1}

    cls.empty_substrate.rpc_request = MagicMock(side_effect=null_request)

    cls.error_substrate = SubstrateInterface(
        url='dummy', ss58_format=42, type_registry_preset='kusama')

    def error_request(method, params):
        return {'jsonrpc': '2.0', 'error': {
            'code': -32602, 'message': 'Generic error message'
        }, 'id': 1}

    cls.error_substrate.rpc_request = MagicMock(side_effect=error_request)
def setUpClass(cls):
    """Reset the type registry, load default + kusama presets at spec
    version 1045, and decode the v10 metadata fixture once."""
    registry = RuntimeConfiguration()
    registry.clear_type_registry()
    for preset_name in ("default", "kusama"):
        registry.update_type_registry(load_type_registry_preset(preset_name))
    registry.set_active_spec_version_id(1045)

    cls.metadata_decoder = MetadataDecoder(ScaleBytes(metadata_v10_hex))
    cls.metadata_decoder.decode()
def setUpClass(cls):
    """Prepare a websocket-based interface with mocked metadata and a
    runtime-version-only RPC stub."""
    cls.substrate = SubstrateWSInterface(
        address_type=42, type_registry_preset='kusama')

    decoder = MetadataDecoder(ScaleBytes(metadata_v12_hex))
    decoder.decode()
    cls.substrate.get_block_metadata = MagicMock(return_value=decoder)

    def rpc_stub(method, params):
        if method == 'chain_getRuntimeVersion':
            return {"jsonrpc": "2.0", "result": {"specVersion": 2023}, "id": 1}

    cls.substrate.rpc_request = MagicMock(side_effect=rpc_stub)
async def test_maptype_call(self):
    """Query System.Account through the async runtime-state API and verify
    the decoded AccountInfo."""
    account_scale = '0x00000000030000c16ff28623000000000000000000000000000000000000000000000000000000c16ff286230000000000000000000000c16ff28623000000000000000000'

    def rpc_stub(method, params):
        if method == 'chain_getRuntimeVersion':
            return {"jsonrpc": "2.0", "result": {"specVersion": 2023}, "id": 1}
        elif method == 'state_getStorageAt':
            return {'jsonrpc': '2.0', 'result': account_scale, 'id': 1}

    self.substrate.rpc_request = MagicMock(side_effect=rpc_stub)
    decoder = MetadataDecoder(ScaleBytes(metadata_v12_hex))
    decoder.decode()
    self.substrate.get_block_metadata = MagicMock(return_value=decoder)

    response = await self.substrate.get_runtime_state(
        module='System',
        storage_function='Account',
        params=['5GNJqTPyNqANBkUVMN1LPPrxXnFouWXoe2wNSmmEoLctxiZY'])

    expected = {
        'data': {
            'feeFrozen': 10000000000000000,
            'free': 10000000000000000,
            'miscFrozen': 10000000000000000,
            'reserved': 0
        },
        'nonce': 0,
        'refcount': 3
    }
    self.assertEqual(response['result'], expected)
def test_decode_metadata_v1(self):
    """Decoding the v1 metadata fixture reports the MetadataV1Decoder version."""
    decoder = MetadataDecoder(ScaleBytes(metadata_v1_hex))
    decoder.decode()
    self.assertEqual("MetadataV1Decoder", decoder.version.value)
def on_post(self, req, resp):
    """Decode the SCALE-encoded metadata posted under 'result' and return
    the processed structure as the response body."""
    decoder = MetadataDecoder(ScaleBytes(req.media.get('result')))
    resp.status = falcon.HTTP_200
    resp.media = decoder.process()
def setUpClass(cls):
    """Decode the v10 metadata fixture once and share it across the class."""
    decoder = MetadataDecoder(ScaleBytes(metadata_v10_hex))
    decoder.decode()
    cls.metadata_decoder = decoder
def test_decode_invalid_metadata_v1(self):
    """Corrupt v1 metadata must raise when decoded."""
    decoder = MetadataDecoder(ScaleBytes(invalid_metadata_v1_hex))
    with self.assertRaises(Exception):
        decoder.decode()
def process_metadata(self, runtime_version_data, block_hash):
    """Ensure runtime metadata for the given spec version is cached and persisted.

    On a cache miss the metadata is loaded from the database if present,
    otherwise fetched from the node, decoded, and fully indexed into the
    Runtime* ORM tables (modules, calls, events, storage, constants, errors)
    inside a sub-transaction.

    Args:
        runtime_version_data: dict from the node's runtime-version RPC
            (keys used: specVersion, implName, implVersion, specName, apis,
            authoringVersion).
        block_hash: block hash used to fetch metadata from the node.

    Side effects: writes to ``self.metadata_store`` and the database session;
    rolls back on SQLAlchemyError.
    """
    spec_version = runtime_version_data.get('specVersion', 0)

    # Check if metadata already in store
    if spec_version not in self.metadata_store:
        print('Metadata: CACHE MISS', spec_version)

        runtime = Runtime.query(self.db_session).get(spec_version)

        if runtime:
            # Already persisted: re-decode the stored SCALE blob into the cache.
            metadata_decoder = MetadataDecoder(
                ScaleBytes(runtime.json_metadata))
            metadata_decoder.decode()
            self.metadata_store[spec_version] = metadata_decoder
        else:
            # Not seen before: fetch, decode and index everything atomically.
            self.db_session.begin(subtransactions=True)
            try:
                # ==== Get block Metadata from Substrate ==================
                substrate = SubstrateInterface(SUBSTRATE_RPC_URL)
                metadata_decoder = substrate.get_block_metadata(block_hash)

                # Store metadata in database
                runtime = Runtime(
                    id=spec_version,
                    impl_name=runtime_version_data["implName"],
                    impl_version=runtime_version_data["implVersion"],
                    spec_name=runtime_version_data["specName"],
                    spec_version=spec_version,
                    json_metadata=str(metadata_decoder.data),
                    json_metadata_decoded=metadata_decoder.value,
                    apis=runtime_version_data["apis"],
                    authoring_version=runtime_version_data[
                        "authoringVersion"],
                    count_call_functions=0,
                    count_events=0,
                    count_modules=len(metadata_decoder.metadata.modules),
                    count_storage_functions=0,
                    count_constants=0,
                    count_errors=0)

                runtime.save(self.db_session)

                print('store version to db', metadata_decoder.version)

                if not metadata_decoder.version:
                    # Legacy V0 fallback: modules expose `functions` instead
                    # of `calls`, and events live in a separate
                    # `events_modules` collection.
                    for module in metadata_decoder.metadata.modules:
                        runtime_module = RuntimeModule(
                            spec_version=spec_version,
                            module_id=module.get_identifier(),
                            prefix=module.prefix,
                            name=module.get_identifier(),
                            count_call_functions=len(module.functions or []),
                            count_storage_functions=len(module.storage or []),
                            count_events=0)
                        runtime_module.save(self.db_session)

                        if len(module.functions or []) > 0:
                            for idx, call in enumerate(module.functions):
                                runtime_call = RuntimeCall(
                                    spec_version=spec_version,
                                    module_id=module.get_identifier(),
                                    call_id=call.get_identifier(),
                                    index=idx,
                                    name=call.name,
                                    lookup=call.lookup,
                                    documentation='\n'.join(call.docs),
                                    count_params=len(call.args))
                                runtime_call.save(self.db_session)

                                for arg in call.args:
                                    runtime_call_param = RuntimeCallParam(
                                        runtime_call_id=runtime_call.id,
                                        name=arg.name,
                                        type=arg.type)
                                    runtime_call_param.save(
                                        self.db_session)

                                    # Check if type already registered in database
                                    self.process_metadata_type(
                                        arg.type, spec_version)

                        # NOTE(review): this event loop iterates ALL
                        # events_modules for every module and bumps the
                        # current runtime_module's event count — confirm
                        # this nesting matches the original V0 intent.
                        for event_module in metadata_decoder.metadata.events_modules:
                            for event_index, event in enumerate(
                                    event_module.events):
                                runtime_event = RuntimeEvent(
                                    spec_version=spec_version,
                                    module_id=event_module.name,
                                    event_id=event.name,
                                    index=event_index,
                                    name=event.name,
                                    lookup=event.lookup,
                                    documentation='\n'.join(event.docs),
                                    count_attributes=len(event.args))
                                runtime_event.save(self.db_session)

                                runtime_module.count_events += 1

                                for arg_index, arg in enumerate(event.args):
                                    runtime_event_attr = RuntimeEventAttribute(
                                        runtime_event_id=runtime_event.id,
                                        index=arg_index,
                                        type=arg)
                                    runtime_event_attr.save(self.db_session)

                        runtime_module.save(self.db_session)
                else:
                    # V1+ metadata: modules carry calls/events/storage/
                    # constants/errors directly.
                    for module in metadata_decoder.metadata.modules:
                        # Check if module exists; duplicate identifiers get a
                        # "_1" suffix to keep the primary key unique.
                        if RuntimeModule.query(self.db_session).filter_by(
                                spec_version=spec_version,
                                module_id=module.get_identifier()).count(
                                ) == 0:
                            module_id = module.get_identifier()
                        else:
                            module_id = '{}_1'.format(
                                module.get_identifier())

                        # Storage backwards compt check: storage may be a
                        # plain list or a wrapper object holding `.items`.
                        if module.storage and isinstance(
                                module.storage, list):
                            storage_functions = module.storage
                        elif module.storage and isinstance(
                                getattr(module.storage, 'value'), dict):
                            storage_functions = module.storage.items
                        else:
                            storage_functions = []

                        runtime_module = RuntimeModule(
                            spec_version=spec_version,
                            module_id=module_id,
                            prefix=module.prefix,
                            name=module.name,
                            count_call_functions=len(module.calls or []),
                            count_storage_functions=len(storage_functions),
                            count_events=len(module.events or []),
                            count_constants=len(module.constants or []),
                            count_errors=len(module.errors or []),
                        )
                        runtime_module.save(self.db_session)

                        # Update totals in runtime
                        runtime.count_call_functions += runtime_module.count_call_functions
                        runtime.count_events += runtime_module.count_events
                        runtime.count_storage_functions += runtime_module.count_storage_functions
                        runtime.count_constants += runtime_module.count_constants
                        runtime.count_errors += runtime_module.count_errors

                        if len(module.calls or []) > 0:
                            for idx, call in enumerate(module.calls):
                                runtime_call = RuntimeCall(
                                    spec_version=spec_version,
                                    module_id=module_id,
                                    call_id=call.get_identifier(),
                                    index=idx,
                                    name=call.name,
                                    lookup=call.lookup,
                                    documentation='\n'.join(call.docs),
                                    count_params=len(call.args))
                                runtime_call.save(self.db_session)

                                for arg in call.args:
                                    runtime_call_param = RuntimeCallParam(
                                        runtime_call_id=runtime_call.id,
                                        name=arg.name,
                                        type=arg.type)
                                    runtime_call_param.save(
                                        self.db_session)

                                    # Check if type already registered in database
                                    self.process_metadata_type(
                                        arg.type, spec_version)

                        if len(module.events or []) > 0:
                            for event_index, event in enumerate(
                                    module.events):
                                runtime_event = RuntimeEvent(
                                    spec_version=spec_version,
                                    module_id=module_id,
                                    event_id=event.name,
                                    index=event_index,
                                    name=event.name,
                                    lookup=event.lookup,
                                    documentation='\n'.join(event.docs),
                                    count_attributes=len(event.args))
                                runtime_event.save(self.db_session)

                                for arg_index, arg in enumerate(
                                        event.args):
                                    runtime_event_attr = RuntimeEventAttribute(
                                        runtime_event_id=runtime_event.id,
                                        index=arg_index,
                                        type=arg)
                                    runtime_event_attr.save(
                                        self.db_session)

                        if len(storage_functions) > 0:
                            for idx, storage in enumerate(
                                    storage_functions):
                                # Determine type: plain value, single-key
                                # map, or double-key map.
                                type_hasher = None
                                type_key1 = None
                                type_key2 = None
                                type_value = None
                                type_is_linked = None
                                type_key2hasher = None

                                if storage.type.get('PlainType'):
                                    type_value = storage.type.get(
                                        'PlainType')
                                elif storage.type.get('MapType'):
                                    type_hasher = storage.type[
                                        'MapType'].get('hasher')
                                    type_key1 = storage.type[
                                        'MapType'].get('key')
                                    type_value = storage.type[
                                        'MapType'].get('value')
                                    type_is_linked = storage.type[
                                        'MapType'].get('isLinked', False)
                                elif storage.type.get('DoubleMapType'):
                                    type_hasher = storage.type[
                                        'DoubleMapType'].get('hasher')
                                    type_key1 = storage.type[
                                        'DoubleMapType'].get('key1')
                                    type_key2 = storage.type[
                                        'DoubleMapType'].get('key2')
                                    type_value = storage.type[
                                        'DoubleMapType'].get('value')
                                    type_key2hasher = storage.type[
                                        'DoubleMapType'].get('key2Hasher')

                                runtime_storage = RuntimeStorage(
                                    spec_version=spec_version,
                                    module_id=module_id,
                                    index=idx,
                                    name=storage.name,
                                    lookup=None,
                                    default=storage.fallback,
                                    modifier=storage.modifier,
                                    type_hasher=type_hasher,
                                    type_key1=type_key1,
                                    type_key2=type_key2,
                                    type_value=type_value,
                                    type_is_linked=type_is_linked,
                                    type_key2hasher=type_key2hasher,
                                    documentation='\n'.join(storage.docs))
                                runtime_storage.save(self.db_session)

                                # Check if types already registered in database
                                self.process_metadata_type(
                                    type_value, spec_version)

                                if type_key1:
                                    self.process_metadata_type(
                                        type_key1, spec_version)

                                if type_key2:
                                    self.process_metadata_type(
                                        type_key2, spec_version)

                        if len(module.constants or []) > 0:
                            for idx, constant in enumerate(
                                    module.constants):

                                # Decode value; on any known decode failure
                                # fall back to storing the raw value.
                                try:
                                    value_obj = ScaleDecoder.get_decoder_class(
                                        constant.type,
                                        ScaleBytes(
                                            constant.constant_value))
                                    value_obj.decode()
                                    value = value_obj.serialize()
                                except ValueError:
                                    value = constant.constant_value
                                except RemainingScaleBytesNotEmptyException:
                                    value = constant.constant_value
                                except NotImplementedError:
                                    value = constant.constant_value

                                runtime_constant = RuntimeConstant(
                                    spec_version=spec_version,
                                    module_id=module_id,
                                    index=idx,
                                    name=constant.name,
                                    type=constant.type,
                                    value=value,
                                    documentation='\n'.join(constant.docs))
                                runtime_constant.save(self.db_session)

                                # Check if types already registered in database
                                self.process_metadata_type(
                                    constant.type, spec_version)

                        if len(module.errors or []) > 0:
                            for idx, error in enumerate(module.errors):
                                runtime_error = RuntimeErrorMessage(
                                    spec_version=spec_version,
                                    module_id=module_id,
                                    index=idx,
                                    name=error.name,
                                    documentation='\n'.join(error.docs))
                                runtime_error.save(self.db_session)

                runtime.save(self.db_session)

                self.db_session.commit()

                # Put in local store
                self.metadata_store[spec_version] = metadata_decoder
            except SQLAlchemyError as e:
                # NOTE(review): the error is swallowed after rollback — the
                # caller gets no signal that persistence failed; confirm this
                # best-effort behavior is intended.
                self.db_session.rollback()
def setUpClass(cls):
    """Load the default type registry and decode the Kusama metadata fixture
    once for the whole class."""
    RuntimeConfiguration().update_type_registry(
        load_type_registry_preset("default"))
    decoder = MetadataDecoder(ScaleBytes(kusama_metadata_hex))
    decoder.decode()
    cls.metadata_decoder = decoder
def test_balance_check_rpc_breakdown(self):
    """
    This test breaks down every RPC call that happens to get balance
    It is here only for the purpose of understanding how everything works

    Steps:

    Step 1: Initial preparatory steps
    - Connect to node
    - RPC Call 1: chain_getFinalisedHead - Get current finalised block hash

    Step 2: Get MetaData for the current block hash
    - RPC Call 2: state_getMetadata
    - Decode the result using MetadataDecoder and ScaleBytes

    Step 3: Prepare hashed data to make the next RPC call
    - We need three params hashed into one single hash string
    - Storage Module, Storage Function and Payload
    - Each param is encoded and hashed using various hashers
      Hasher for Payload is obtained from the dict at the bottom of the docstring

    Step 4: Get data at the storage hash prepared in Step 3
    - RPC Call 3: state_getStorageAt
    - Decode the data using ScaleDecoder's special class

    This is a key information used to encode/hash/decode in the entire function
    {
        'name': 'Account',
        'modifier': 'Default',
        'type': {
            'MapType': {
                'hasher': 'Blake2_128Concat',
                'key': 'AccountId',
                'value': 'AccountInfo<Index, AccountData>',
                'isLinked': False
            }
        },
    }

    NOTE(review): this test hits a live node at settings.NODE_URL — it is a
    walkthrough, not an isolated unit test.
    """
    # Test data
    test_address = "HsgNgA5sgjuKxGUeaZPJE8rRn9RuixjvnPkVLFUYLEpj15G"

    ### STEP 1
    # Connect to the node
    substrate = SubstrateInterface(
        url=settings.NODE_URL,
        address_type=2,
        type_registry_preset="kusama",
    )

    # Get finalised block hash
    block_hash = substrate.rpc_request("chain_getFinalisedHead",
                                       []).get("result")
    if not block_hash:
        raise Exception(
            "ERROR: RPC call for chain_getFinalisedHead failed")
    print("\n\n")
    print("-" * 100)
    print(f"BLOCK HASH: {block_hash}")

    ### STEP 2
    # Get metadata decoder, this is needed later
    metadata_result = substrate.rpc_request("state_getMetadata",
                                            [block_hash]).get("result")
    if not metadata_result:
        raise Exception("ERROR: RPC call for state_getMetadata failed")
    metadata_encoded = MetadataDecoder(ScaleBytes(metadata_result))
    metadata = metadata_encoded.decode()

    ### STEP 3
    # This comes from the metadata dict in the docstring `type` -> `MapType` -> `key`
    map_type_key = "AccountId"
    # SS58-decode the address to raw hex (network prefix 2 = Kusama).
    test_address_modified = "0x{}".format(ss58.ss58_decode(
        test_address, 2))
    print(f"TEST ADDRESS SS58 DECODED: {test_address_modified}")
    scale_decoder = ScaleDecoder.get_decoder_class(map_type_key)
    test_address_encoded = scale_decoder.encode(test_address_modified)
    print(f"TEST ADDRESS ENCODED: {test_address_encoded}")
    # Why blake2_128_concat? Because metadata dict in the docstring `type` -> `MapType` -> `hasher`
    test_address_hash = hasher.blake2_128_concat(test_address_encoded.data)

    # `System` is our module and `Account` if our function for this example
    storage_module = "System"
    storage_function = "Account"
    # Module/function prefixes use xxhash-128 per the storage-key scheme.
    storage_module_hash = hasher.xxh128(storage_module.encode())
    storage_function_hash = hasher.xxh128(storage_function.encode())
    print(f"STORAGE MODULE: {storage_module}")
    print(f"STORAGE MODULE ENCODED: {storage_module.encode()}")
    print(f"STORAGE MODULE ENCODED HASHED: {storage_module_hash}")
    print(f"STORAGE FUNCTION: {storage_function}")
    print(f"STORAGE FUNCTION ENCODED: {storage_function.encode()}")
    print(f"STORAGE FUNCTION ENCODED HASHED: {storage_function_hash}")
    print(f"TEST ADDRESS: {test_address}")
    print(f"TEST ADDRESS SS58 DECODED: {test_address_modified}")
    print(f"TEST ADDRESS ENCODED: {test_address_encoded}")
    print(f"TEST ADDRESS ENCODED HASHED: {test_address_hash}")
    # Final storage key = 0x + xxh128(module) + xxh128(function) + hashed payload.
    storage_hash = (f"0x"
                    f"{storage_module_hash}"
                    f"{storage_function_hash}"
                    f"{test_address_hash}")
    print(f"COMBINED HASH: {storage_hash}")

    ### STEP 4
    response = substrate.rpc_request("state_getStorageAt",
                                     [storage_hash, block_hash])
    result = response.get("result")
    if not result:
        raise Exception("ERROR: RPC call for state_getStorageAt failed")
    print(f"RPC RESULT: {result}")
    print("-" * 100)
    print("DECODING ABOVE RESULT ... ...")

    # This is again extracted from the metadata dict in the docstring `type` -> `MapType` -> `value`
    return_type = "AccountInfo<Index, AccountData>"
    result_decoded = ScaleDecoder.get_decoder_class(
        return_type, ScaleBytes(result), metadata=metadata).decode()
    print(f"RESULT DECODED: {result_decoded}")