def test_compact_u32_larger_than_4bytes(self):
    """Values beyond the 4-byte compact range must use big-integer mode."""
    scale_obj = ScaleDecoder.get_decoder_class('Compact<u32>')
    scale_obj.encode(150000000000000)
    self.assertEqual("0x0b0060b7986c88", str(scale_obj.data))
def test_compact_u32_invalid(self):
    """Decoding a compact integer from empty data raises an error."""
    scale_obj = ScaleDecoder.get_decoder_class('Compact<u32>', ScaleBytes("0x"))
    with self.assertRaises(InvalidScaleTypeValueException):
        scale_obj.decode()
def test_bitvec_encode_empty_list(self):
    """An empty bit list encodes to a single zero-length prefix byte."""
    bitvec = ScaleDecoder.get_decoder_class('BitVec')
    encoded = bitvec.encode([])
    self.assertEqual('0x00', encoded.to_hex())
def test_compact_u32_1byte(self):
    """A single-byte compact value decodes to the expected integer."""
    scale_obj = ScaleDecoder.get_decoder_class('Compact<u32>', ScaleBytes("0x18"))
    scale_obj.decode()
    self.assertEqual(6, scale_obj.value)
def test_bitvec_decode_size2(self):
    """A two-bit BitVec decodes to a list of two booleans."""
    bitvec = ScaleDecoder.get_decoder_class('BitVec', ScaleBytes('0x0803'))
    bitvec.decode()
    self.assertEqual([True, True], bitvec.value)
def test_custom_tuple(self):
    """A tuple type string decodes into a col1/col2 dict."""
    tuple_obj = ScaleDecoder.get_decoder_class('(u8,u8)', ScaleBytes("0x0102"))
    decoded = tuple_obj.decode()
    self.assertEqual(decoded, {'col1': 1, 'col2': 2})
def test_era_invalid_decode(self):
    """An invalid mortal Era encoding raises ValueError on decode."""
    era_obj = ScaleDecoder.get_decoder_class('Era', ScaleBytes('0x0101'))
    with self.assertRaises(ValueError):
        era_obj.decode()
def process_metadata(self, runtime_version_data, block_hash):
    """Ensure decoded runtime metadata for the given runtime version is cached.

    Lookup order: in-memory store -> database -> Substrate node. On a full
    miss the metadata is fetched from the node and its modules, calls,
    events, storage functions and constants are persisted inside a nested
    transaction; on SQLAlchemy failure everything is rolled back (the error
    is swallowed and the store is left unpopulated).

    :param runtime_version_data: dict from the node's runtime-version RPC;
        keys like 'implName'/'apis' are assumed present on a cache miss.
    :param block_hash: hash of the block whose metadata should be fetched.
    """
    spec_version = runtime_version_data.get('specVersion', 0)

    # Check if metadata already in store
    if spec_version not in self.metadata_store:
        print('Metadata: CACHE MISS', spec_version)

        runtime = Runtime.query(self.db_session).get(spec_version)

        if runtime:
            # Database hit: re-decode the stored raw metadata bytes.
            metadata_decoder = MetadataDecoder(
                ScaleBytes(runtime.json_metadata))
            metadata_decoder.decode()
            self.metadata_store[spec_version] = metadata_decoder
        else:
            self.db_session.begin(subtransactions=True)
            try:
                # ==== Get block Metadata from Substrate ==================
                substrate = SubstrateInterface(SUBSTRATE_RPC_URL)
                metadata_decoder = substrate.get_block_metadata(block_hash)

                # Store metadata in database
                runtime = Runtime(
                    id=spec_version,
                    impl_name=runtime_version_data["implName"],
                    impl_version=runtime_version_data["implVersion"],
                    spec_name=runtime_version_data["specName"],
                    spec_version=spec_version,
                    json_metadata=str(metadata_decoder.data),
                    json_metadata_decoded=metadata_decoder.value,
                    apis=runtime_version_data["apis"],
                    authoring_version=runtime_version_data["authoringVersion"],
                    count_call_functions=0,
                    count_events=0,
                    count_modules=len(metadata_decoder.metadata.modules),
                    count_storage_functions=0)

                runtime.save(self.db_session)

                print('store version to db', metadata_decoder.version)

                if not metadata_decoder.version:
                    # Legacy V0 fallback
                    for module in metadata_decoder.metadata.modules:
                        runtime_module = RuntimeModule(
                            spec_version=spec_version,
                            module_id=module.get_identifier(),
                            prefix=module.prefix,
                            name=module.get_identifier(),
                            count_call_functions=len(module.functions or []),
                            count_storage_functions=len(module.storage or []),
                            count_events=0)
                        runtime_module.save(self.db_session)

                        if len(module.functions or []) > 0:
                            for idx, call in enumerate(module.functions):
                                runtime_call = RuntimeCall(
                                    spec_version=spec_version,
                                    module_id=module.get_identifier(),
                                    call_id=call.get_identifier(),
                                    index=idx,
                                    name=call.name,
                                    lookup=call.lookup,
                                    documentation='\n'.join(call.docs),
                                    count_params=len(call.args))
                                runtime_call.save(self.db_session)

                                for arg in call.args:
                                    runtime_call_param = RuntimeCallParam(
                                        runtime_call_id=runtime_call.id,
                                        name=arg.name,
                                        type=arg.type)
                                    runtime_call_param.save(self.db_session)

                                    # Check if type already registered in database
                                    self.process_metadata_type(
                                        arg.type, spec_version)

                    # NOTE(review): V0 events live outside the module list;
                    # this loop reuses `runtime_module` as bound by the loop
                    # above (i.e. the last module) — confirm that is intended.
                    for event_module in metadata_decoder.metadata.events_modules:
                        for event_index, event in enumerate(
                                event_module.events):
                            runtime_event = RuntimeEvent(
                                spec_version=spec_version,
                                module_id=event_module.name,
                                event_id=event.name,
                                index=event_index,
                                name=event.name,
                                lookup=event.lookup,
                                documentation='\n'.join(event.docs),
                                count_attributes=len(event.args))
                            runtime_event.save(self.db_session)

                            runtime_module.count_events += 1

                            for arg_index, arg in enumerate(event.args):
                                runtime_event_attr = RuntimeEventAttribute(
                                    runtime_event_id=runtime_event.id,
                                    index=arg_index,
                                    type=arg)
                                runtime_event_attr.save(self.db_session)

                        runtime_module.save(self.db_session)
                else:
                    for module in metadata_decoder.metadata.modules:

                        # Check if module exists
                        if RuntimeModule.query(self.db_session).filter_by(
                                spec_version=spec_version,
                                module_id=module.get_identifier()).count() == 0:
                            module_id = module.get_identifier()
                        else:
                            # Duplicate identifier in this spec version:
                            # disambiguate with a "_1" suffix.
                            module_id = '{}_1'.format(module.get_identifier())

                        # Storage backwards compt check
                        if module.storage and isinstance(
                                module.storage, list):
                            storage_functions = module.storage
                        elif module.storage and isinstance(
                                getattr(module.storage, 'value'), dict):
                            storage_functions = module.storage.items
                        else:
                            storage_functions = []

                        runtime_module = RuntimeModule(
                            spec_version=spec_version,
                            module_id=module_id,
                            prefix=module.prefix,
                            name=module.name,
                            count_call_functions=len(module.calls or []),
                            count_storage_functions=len(storage_functions),
                            count_events=len(module.events or []))
                        runtime_module.save(self.db_session)

                        # Update totals in runtime
                        runtime.count_call_functions += runtime_module.count_call_functions
                        runtime.count_events += runtime_module.count_events
                        runtime.count_storage_functions += runtime_module.count_storage_functions

                        if len(module.calls or []) > 0:
                            for idx, call in enumerate(module.calls):
                                runtime_call = RuntimeCall(
                                    spec_version=spec_version,
                                    module_id=module_id,
                                    call_id=call.get_identifier(),
                                    index=idx,
                                    name=call.name,
                                    lookup=call.lookup,
                                    documentation='\n'.join(call.docs),
                                    count_params=len(call.args))
                                runtime_call.save(self.db_session)

                                for arg in call.args:
                                    runtime_call_param = RuntimeCallParam(
                                        runtime_call_id=runtime_call.id,
                                        name=arg.name,
                                        type=arg.type)
                                    runtime_call_param.save(self.db_session)

                                    # Check if type already registered in database
                                    self.process_metadata_type(
                                        arg.type, spec_version)

                        if len(module.events or []) > 0:
                            for event_index, event in enumerate(
                                    module.events):
                                runtime_event = RuntimeEvent(
                                    spec_version=spec_version,
                                    module_id=module_id,
                                    event_id=event.name,
                                    index=event_index,
                                    name=event.name,
                                    lookup=event.lookup,
                                    documentation='\n'.join(event.docs),
                                    count_attributes=len(event.args))
                                runtime_event.save(self.db_session)

                                for arg_index, arg in enumerate(event.args):
                                    runtime_event_attr = RuntimeEventAttribute(
                                        runtime_event_id=runtime_event.id,
                                        index=arg_index,
                                        type=arg)
                                    runtime_event_attr.save(self.db_session)

                        if len(storage_functions) > 0:
                            for idx, storage in enumerate(storage_functions):

                                # Determine type
                                type_hasher = None
                                type_key1 = None
                                type_key2 = None
                                type_value = None
                                type_is_linked = None
                                type_key2hasher = None

                                if storage.type.get('PlainType'):
                                    type_value = storage.type.get('PlainType')
                                elif storage.type.get('MapType'):
                                    type_hasher = storage.type[
                                        'MapType'].get('hasher')
                                    type_key1 = storage.type[
                                        'MapType'].get('key')
                                    type_value = storage.type[
                                        'MapType'].get('value')
                                    type_is_linked = storage.type[
                                        'MapType'].get('isLinked', False)
                                elif storage.type.get('DoubleMapType'):
                                    type_hasher = storage.type[
                                        'DoubleMapType'].get('hasher')
                                    type_key1 = storage.type[
                                        'DoubleMapType'].get('key1')
                                    type_key2 = storage.type[
                                        'DoubleMapType'].get('key2')
                                    type_value = storage.type[
                                        'DoubleMapType'].get('value')
                                    type_key2hasher = storage.type[
                                        'DoubleMapType'].get('key2Hasher')

                                runtime_storage = RuntimeStorage(
                                    spec_version=spec_version,
                                    module_id=module_id,
                                    index=idx,
                                    name=storage.name,
                                    lookup=None,
                                    default=storage.fallback,
                                    modifier=storage.modifier,
                                    type_hasher=type_hasher,
                                    type_key1=type_key1,
                                    type_key2=type_key2,
                                    type_value=type_value,
                                    type_is_linked=type_is_linked,
                                    type_key2hasher=type_key2hasher,
                                    documentation='\n'.join(storage.docs))
                                runtime_storage.save(self.db_session)

                                # Check if types already registered in database
                                self.process_metadata_type(
                                    type_value, spec_version)

                                if type_key1:
                                    self.process_metadata_type(
                                        type_key1, spec_version)

                                if type_key2:
                                    self.process_metadata_type(
                                        type_key2, spec_version)

                        if len(module.constants or []) > 0:
                            for idx, constant in enumerate(module.constants):

                                # Decode value
                                try:
                                    value_obj = ScaleDecoder.get_decoder_class(
                                        constant.type,
                                        ScaleBytes(constant.constant_value))
                                    value_obj.decode()
                                    value = value_obj.serialize()
                                except ValueError:
                                    value = constant.constant_value
                                except RemainingScaleBytesNotEmptyException:
                                    value = constant.constant_value
                                except NotImplementedError:
                                    value = constant.constant_value

                                runtime_constant = RuntimeConstant(
                                    spec_version=spec_version,
                                    module_id=module_id,
                                    index=idx,
                                    name=constant.name,
                                    type=constant.type,
                                    value=value,
                                    documentation='\n'.join(constant.docs))
                                runtime_constant.save(self.db_session)

                                # Check if types already registered in database
                                self.process_metadata_type(
                                    constant.type, spec_version)

                runtime.save(self.db_session)

                self.db_session.commit()

                # Put in local store
                self.metadata_store[spec_version] = metadata_decoder
            except SQLAlchemyError as e:
                # NOTE(review): error is swallowed after rollback — the
                # caller cannot tell the metadata was not persisted.
                self.db_session.rollback()
def test_b_seller_approve_as_multi_and_admin_as_multi(self):
    """Seller approves the multisig call, then admin executes it.

    First txn (approve_as_multi by seller) must NOT move funds; second
    txn (as_multi by admin, referencing the first txn's timepoint) must
    release the escrowed trade value to the buyer.
    """
    print("Seller broadcasts approve as multi")

    original_balance = utils.get_balance_for_address(
        self.buyer_address).get("free")

    # Function that hashes call with blake2_256
    def hash_call(call):
        call = bytes.fromhex(str(call.data)[2:])
        return f"0x{blake2_256(call)}"

    hashed_call = hash_call(self.inner_call)

    outer_call = ScaleDecoder.get_decoder_class(
        "Call", metadata=substrate.metadata_decoder)
    outer_call.encode({
        "call_module": "Utility",
        "call_function": "approve_as_multi",
        "call_args": {
            "call_hash": hashed_call,
            "maybe_timepoint": None,
            "other_signatories":
                sorted([self.admin_address, self.buyer_address]),
            "threshold": 2,
        },
    })

    response = substrate.get_runtime_state("System", "Account",
                                           [self.seller_address])
    assert response.get("result")
    nonce = response["result"].get("nonce", 0)

    genesis_hash = substrate.get_block_hash(0)
    era = "00"

    # Create signature payload
    signature_payload = ScaleDecoder.get_decoder_class(
        "ExtrinsicPayloadValue")
    signature_payload.encode({
        "call": str(outer_call.data),
        "era": era,
        "nonce": nonce,
        "tip": 0,
        "specVersion": substrate.runtime_version,
        "genesisHash": genesis_hash,
        "blockHash": genesis_hash,
    })

    # Sign payload
    data = str(signature_payload.data)
    if data[0:2] == "0x":
        data = bytes.fromhex(data[2:])
    else:
        data = data.encode()
    signature = sr25519.sign(
        (bytes.fromhex(self.seller_pub), bytes.fromhex(self.seller_priv)),
        data)
    signature = "0x{}".format(signature.hex())

    # Create extrinsic
    extrinsic = ScaleDecoder.get_decoder_class(
        "Extrinsic", metadata=substrate.metadata_decoder)
    extrinsic.encode({
        "account_id": "0x" + self.seller_pub,
        "signature_version": 1,
        "signature": signature,
        "call_function": outer_call.value["call_function"],
        "call_module": outer_call.value["call_module"],
        "call_args": outer_call.value["call_args"],
        "nonce": nonce,
        "era": "00",
        "tip": 0,
    })

    response = rpc_subscription(
        "author_submitAndWatchExtrinsic",
        [str(extrinsic.data)],
        substrate.request_id,
        settings.NODE_URL,
    )

    extrinsic_hash = utils.get_extrinsic_hash(str(extrinsic.data))
    print("extrinsic_hash", extrinsic_hash)
    extrinsic_time_point = utils.get_time_point(response, extrinsic_hash)
    print("extrinsic_time_point", extrinsic_time_point)
    events = utils.get_extrinsic_events(extrinsic_time_point)
    print(events)

    # BUG FIX: the original `assertTrue("NewMultiSig" and "ExtrinsicSuccess"
    # in str(events))` never checked "NewMultiSig" — a non-empty string
    # literal is always truthy, so the `and` reduced to the second operand.
    events_str = str(events)
    self.assertIn("NewMultiSig", events_str)
    self.assertIn("ExtrinsicSuccess", events_str)

    new_balance = utils.get_balance_for_address(
        self.buyer_address).get("free")
    # Make sure no funds move to buyer yet
    self.assertEqual(new_balance, original_balance)

    #############################################################
    ## Second Txn: AsMulti by Admin                             #
    #############################################################
    print("Admin finalises release of funds")

    original_balance = utils.get_balance_for_address(
        self.buyer_address).get("free")

    outer_call = ScaleDecoder.get_decoder_class(
        "Call", metadata=substrate.metadata_decoder)
    print("PREV TIMEPOINT: ", extrinsic_time_point)
    outer_call.encode({
        "call_module": "Utility",
        "call_function": "as_multi",
        "call_args": {
            "call": self.inner_call.serialize(),
            "maybe_timepoint": {
                "height": extrinsic_time_point[0],
                "index": extrinsic_time_point[1],
            },
            # IMPORTANT: has to be sorted
            "other_signatories":
                sorted([self.buyer_address, self.seller_address]),
            "threshold": 2,
        },
    })

    response = substrate.get_runtime_state("System", "Account",
                                           [self.admin_address])
    assert response.get("result")
    nonce = response["result"].get("nonce", 0)

    genesis_hash = substrate.get_block_hash(0)
    era = "00"

    # Create signature payload
    signature_payload = ScaleDecoder.get_decoder_class(
        "ExtrinsicPayloadValue")
    signature_payload.encode({
        "call": str(outer_call.data),
        "era": era,
        "nonce": nonce,
        "tip": 0,
        "specVersion": substrate.runtime_version,
        "genesisHash": genesis_hash,
        "blockHash": genesis_hash,
    })

    # Sign payload
    data = str(signature_payload.data)
    if data[0:2] == "0x":
        data = bytes.fromhex(data[2:])
    else:
        data = data.encode()
    signature = sr25519.sign(
        (bytes.fromhex(self.admin_pub), bytes.fromhex(self.admin_priv)),
        data)
    signature = "0x{}".format(signature.hex())

    # Create extrinsic
    extrinsic = ScaleDecoder.get_decoder_class(
        "Extrinsic", metadata=substrate.metadata_decoder)
    extrinsic.encode({
        "account_id": "0x" + self.admin_pub,
        "signature_version": 1,
        "signature": signature,
        "call_function": outer_call.value["call_function"],
        "call_module": outer_call.value["call_module"],
        "call_args": outer_call.value["call_args"],
        "nonce": nonce,
        "era": "00",
        "tip": 0,
    })

    response = rpc_subscription(
        "author_submitAndWatchExtrinsic",
        [str(extrinsic.data)],
        substrate.request_id,
        settings.NODE_URL,
    )

    extrinsic_hash = utils.get_extrinsic_hash(str(extrinsic.data))
    print("extrinsic_hash", extrinsic_hash)
    extrinsic_time_point = utils.get_time_point(response, extrinsic_hash)
    print("extrinsic_time_point", extrinsic_time_point)
    events = utils.get_extrinsic_events(extrinsic_time_point)
    print(events)

    # BUG FIX: same truthy-literal `and` bug as above — check both events.
    events_str = str(events)
    self.assertIn("MultiSigExecuted", events_str)
    self.assertIn("ExtrinsicSuccess", events_str)

    new_balance = utils.get_balance_for_address(
        self.buyer_address).get("free")
    self.assertEqual(new_balance, original_balance + settings.trade_value)
def test_str_representation(self):
    """str() of a decoded Bytes object yields the decoded text."""
    bytes_obj = ScaleDecoder.get_decoder_class('Bytes', ScaleBytes("0x1054657374"))
    bytes_obj.decode()
    self.assertEqual("Test", str(bytes_obj))
def process_metadata(self, spec_version, block_hash):
    """Ensure decoded runtime metadata for `spec_version` is in the local store.

    On a database hit the decoded metadata is taken from the substrate
    client's cache (or re-fetched for the block). On a miss the full
    runtime — modules, calls, events, storage, constants, errors and the
    type registry — is decoded and persisted inside a nested transaction;
    on SQLAlchemy failure the transaction is rolled back (the error is
    swallowed and the store is left unpopulated).

    :param spec_version: runtime spec version to process.
    :param block_hash: block hash used to fetch runtime state and metadata.
    """
    # Check if metadata already stored
    runtime = Runtime.query(self.db_session).get(spec_version)

    if runtime:
        if spec_version in self.substrate.metadata_cache:
            self.metadata_store[
                spec_version] = self.substrate.metadata_cache[spec_version]
        else:
            self.metadata_store[
                spec_version] = self.substrate.get_block_metadata(
                    block_hash=block_hash)
    else:
        print('Metadata: CACHE MISS', spec_version)

        runtime_version_data = self.substrate.get_block_runtime_version(
            block_hash)

        self.db_session.begin(subtransactions=True)
        try:
            # Store metadata in database
            runtime = Runtime(
                id=spec_version,
                impl_name=runtime_version_data["implName"],
                impl_version=runtime_version_data["implVersion"],
                spec_name=runtime_version_data["specName"],
                spec_version=spec_version,
                json_metadata=str(self.substrate.metadata_decoder.data),
                json_metadata_decoded=self.substrate.metadata_decoder.
                value,
                apis=runtime_version_data["apis"],
                authoring_version=runtime_version_data["authoringVersion"],
                count_call_functions=0,
                count_events=0,
                count_modules=len(
                    self.substrate.metadata_decoder.metadata.modules),
                count_storage_functions=0,
                count_constants=0,
                count_errors=0)
            runtime.save(self.db_session)

            print('store version to db',
                  self.substrate.metadata_decoder.version)

            for module_index, module in enumerate(
                    self.substrate.metadata_decoder.metadata.modules):

                # Prefer the module's own index when the metadata format
                # carries one — presumably newer metadata versions; the
                # enumerate position is the fallback. TODO confirm.
                if hasattr(module, 'index'):
                    module_index = module.index

                # Check if module exists
                if RuntimeModule.query(self.db_session).filter_by(
                        spec_version=spec_version,
                        module_id=module.get_identifier()).count() == 0:
                    module_id = module.get_identifier()
                else:
                    # Duplicate identifier in this spec version:
                    # disambiguate with a "_1" suffix.
                    module_id = '{}_1'.format(module.get_identifier())

                # Storage backwards compt check
                if module.storage and isinstance(module.storage, list):
                    storage_functions = module.storage
                elif module.storage and isinstance(
                        getattr(module.storage, 'value'), dict):
                    storage_functions = module.storage.items
                else:
                    storage_functions = []

                runtime_module = RuntimeModule(
                    spec_version=spec_version,
                    module_id=module_id,
                    prefix=module.prefix,
                    name=module.name,
                    count_call_functions=len(module.calls or []),
                    count_storage_functions=len(storage_functions),
                    count_events=len(module.events or []),
                    count_constants=len(module.constants or []),
                    count_errors=len(module.errors or []),
                )
                runtime_module.save(self.db_session)

                # Update totals in runtime
                runtime.count_call_functions += runtime_module.count_call_functions
                runtime.count_events += runtime_module.count_events
                runtime.count_storage_functions += runtime_module.count_storage_functions
                runtime.count_constants += runtime_module.count_constants
                runtime.count_errors += runtime_module.count_errors

                if len(module.calls or []) > 0:
                    for idx, call in enumerate(module.calls):
                        runtime_call = RuntimeCall(
                            spec_version=spec_version,
                            module_id=module_id,
                            call_id=call.get_identifier(),
                            index=idx,
                            name=call.name,
                            lookup=call.lookup,
                            documentation='\n'.join(call.docs),
                            count_params=len(call.args))
                        runtime_call.save(self.db_session)

                        for arg in call.args:
                            runtime_call_param = RuntimeCallParam(
                                runtime_call_id=runtime_call.id,
                                name=arg.name,
                                type=arg.type)
                            runtime_call_param.save(self.db_session)

                if len(module.events or []) > 0:
                    for event_index, event in enumerate(module.events):
                        runtime_event = RuntimeEvent(
                            spec_version=spec_version,
                            module_id=module_id,
                            event_id=event.name,
                            index=event_index,
                            name=event.name,
                            lookup=event.lookup,
                            documentation='\n'.join(event.docs),
                            count_attributes=len(event.args))
                        runtime_event.save(self.db_session)

                        for arg_index, arg in enumerate(event.args):
                            runtime_event_attr = RuntimeEventAttribute(
                                runtime_event_id=runtime_event.id,
                                index=arg_index,
                                type=arg)
                            runtime_event_attr.save(self.db_session)

                if len(storage_functions) > 0:
                    for idx, storage in enumerate(storage_functions):

                        # Determine type
                        type_hasher = None
                        type_key1 = None
                        type_key2 = None
                        type_value = None
                        type_is_linked = None
                        type_key2hasher = None

                        if storage.type.get('PlainType'):
                            type_value = storage.type.get('PlainType')
                        elif storage.type.get('MapType'):
                            type_hasher = storage.type['MapType'].get(
                                'hasher')
                            type_key1 = storage.type['MapType'].get('key')
                            type_value = storage.type['MapType'].get(
                                'value')
                            type_is_linked = storage.type['MapType'].get(
                                'isLinked', False)
                        elif storage.type.get('DoubleMapType'):
                            type_hasher = storage.type[
                                'DoubleMapType'].get('hasher')
                            type_key1 = storage.type['DoubleMapType'].get(
                                'key1')
                            type_key2 = storage.type['DoubleMapType'].get(
                                'key2')
                            type_value = storage.type['DoubleMapType'].get(
                                'value')
                            type_key2hasher = storage.type[
                                'DoubleMapType'].get('key2Hasher')

                        runtime_storage = RuntimeStorage(
                            spec_version=spec_version,
                            module_id=module_id,
                            index=idx,
                            name=storage.name,
                            lookup=None,
                            default=storage.fallback,
                            modifier=storage.modifier,
                            type_hasher=type_hasher,
                            # Storage key prefix: concatenated xxh128 of
                            # module prefix and storage function name.
                            storage_key=xxh128(module.prefix.encode()) +
                            xxh128(storage.name.encode()),
                            type_key1=type_key1,
                            type_key2=type_key2,
                            type_value=type_value,
                            type_is_linked=type_is_linked,
                            type_key2hasher=type_key2hasher,
                            documentation='\n'.join(storage.docs))
                        runtime_storage.save(self.db_session)

                if len(module.constants or []) > 0:
                    for idx, constant in enumerate(module.constants):

                        # Decode value
                        try:
                            value_obj = ScaleDecoder.get_decoder_class(
                                constant.type,
                                ScaleBytes(constant.constant_value))
                            value_obj.decode()
                            value = value_obj.serialize()
                        except ValueError:
                            value = constant.constant_value
                        except RemainingScaleBytesNotEmptyException:
                            value = constant.constant_value
                        except NotImplementedError:
                            value = constant.constant_value

                        # Composite values are stored as JSON text.
                        if type(value) is list or type(value) is dict:
                            value = json.dumps(value)

                        runtime_constant = RuntimeConstant(
                            spec_version=spec_version,
                            module_id=module_id,
                            index=idx,
                            name=constant.name,
                            type=constant.type,
                            value=value,
                            documentation='\n'.join(constant.docs))
                        runtime_constant.save(self.db_session)

                if len(module.errors or []) > 0:
                    for idx, error in enumerate(module.errors):
                        runtime_error = RuntimeErrorMessage(
                            spec_version=spec_version,
                            module_id=module_id,
                            module_index=module_index,
                            index=idx,
                            name=error.name,
                            documentation='\n'.join(error.docs))
                        runtime_error.save(self.db_session)

            runtime.save(self.db_session)

            # Process types
            for runtime_type_data in list(
                    self.substrate.get_type_registry(
                        block_hash=block_hash).values()):
                runtime_type = RuntimeType(
                    spec_version=runtime_type_data["spec_version"],
                    type_string=runtime_type_data["type_string"],
                    decoder_class=runtime_type_data["decoder_class"],
                    is_primitive_core=runtime_type_data[
                        "is_primitive_core"],
                    is_primitive_runtime=runtime_type_data[
                        "is_primitive_runtime"])
                runtime_type.save(self.db_session)

            self.db_session.commit()

            # Put in local store
            self.metadata_store[
                spec_version] = self.substrate.metadata_decoder
        except SQLAlchemyError as e:
            # NOTE(review): error is swallowed after rollback — the caller
            # cannot tell the metadata was not persisted.
            self.db_session.rollback()
def test_no_more_bytes_available(self):
    """Decoding a fixed-size array with too few input bytes raises."""
    arr_obj = ScaleDecoder.get_decoder_class('[u8; 4]', ScaleBytes("0x010203"))
    with self.assertRaises(RemainingScaleBytesNotEmptyException):
        arr_obj.decode(False)
def test_scale_decoder_remaining_bytes(self):
    """get_remaining_bytes returns all not-yet-consumed input bytes."""
    arr_obj = ScaleDecoder.get_decoder_class('[u8; 3]', ScaleBytes("0x010203"))
    remaining = arr_obj.get_remaining_bytes()
    self.assertEqual(remaining, b"\x01\x02\x03")
def test_bytes_data_format(self):
    """ScaleBytes accepts raw bytes input as well as hex strings."""
    scale_obj = ScaleDecoder.get_decoder_class(
        'Compact<u32>', ScaleBytes(b"\x02\x09\x3d\x00"))
    scale_obj.decode()
    self.assertEqual(1000000, scale_obj.value)
def test_dynamic_fixed_array_type_encode(self):
    """Fixed-size arrays encode from both value lists and hex strings."""
    u32_arr = ScaleDecoder.get_decoder_class('[u32; 1]')
    self.assertEqual(str(u32_arr.encode([1, 2])), '0x0100000002000000')

    u8_arr = ScaleDecoder.get_decoder_class('[u8; 3]')
    self.assertEqual(str(u8_arr.encode('0x010203')), '0x010203')
def test_a_seller_fund_escrow(self):
    """Seller transfers the trade value into the escrow account.

    Builds a Balances.transfer call, signs its payload with the seller's
    sr25519 key, submits the extrinsic and asserts the expected events
    fired and the escrow account balance is non-negative.
    """
    print("Seller places funds into escrow")

    call = ScaleDecoder.get_decoder_class(
        "Call", metadata=substrate.metadata_decoder)
    call.encode({
        "call_module": "Balances",
        "call_function": "transfer",
        "call_args": {
            "dest": settings.escrow_address,
            "value": settings.trade_value,
        },
    })

    response = substrate.get_runtime_state("System", "Account",
                                           [self.seller_address])
    assert response.get("result")
    nonce = response["result"].get("nonce", 0)

    genesis_hash = substrate.get_block_hash(0)
    era = "00"

    # Create signature payload
    signature_payload = ScaleDecoder.get_decoder_class(
        "ExtrinsicPayloadValue")
    signature_payload.encode({
        "call": str(call.data),
        "era": era,
        "nonce": nonce,
        "tip": 0,
        "specVersion": substrate.runtime_version,
        "genesisHash": genesis_hash,
        "blockHash": genesis_hash,
    })

    # Sign payload
    data = str(signature_payload.data)
    if data[0:2] == "0x":
        data = bytes.fromhex(data[2:])
    else:
        data = data.encode()
    signature = sr25519.sign(
        (bytes.fromhex(self.seller_pub), bytes.fromhex(self.seller_priv)),
        data)
    signature = "0x{}".format(signature.hex())

    # Create extrinsic
    extrinsic = ScaleDecoder.get_decoder_class(
        "Extrinsic", metadata=substrate.metadata_decoder)
    extrinsic.encode({
        "account_id": "0x" + self.seller_pub,
        "signature_version": 1,
        "signature": signature,
        "call_function": call.value["call_function"],
        "call_module": call.value["call_module"],
        "call_args": call.value["call_args"],
        "nonce": nonce,
        "era": "00",
        "tip": 0,
    })
    self.assertEqual(len(str(extrinsic.data)), 288)

    # Broadcast like this
    response = rpc_subscription(
        "author_submitAndWatchExtrinsic",
        [str(extrinsic.data)],
        substrate.request_id,
        settings.NODE_URL,
    )

    extrinsic_hash = utils.get_extrinsic_hash(str(extrinsic.data))
    print("extrinsic_hash", extrinsic_hash)
    extrinsic_time_point = utils.get_time_point(response, extrinsic_hash)
    print("extrinsic_time_point", extrinsic_time_point)
    events = utils.get_extrinsic_events(extrinsic_time_point)
    print(events)

    # BUG FIX: the original `assertTrue("Deposit" and "ExtrinsicSuccess" in
    # str(events))` never checked "Deposit" — a non-empty string literal is
    # always truthy, so the `and` reduced to the second operand.
    events_str = str(events)
    self.assertIn("Deposit", events_str)
    self.assertIn("ExtrinsicSuccess", events_str)

    new_balance = utils.get_balance_for_address(
        settings.escrow_address).get("free")
    self.assertTrue(new_balance >= 0)
def test_invalid_fixed_array_type_encode(self):
    """Encoding data whose length mismatches the array type raises ValueError."""
    u8_arr = ScaleDecoder.get_decoder_class('[u8; 3]')
    with self.assertRaises(ValueError):
        u8_arr.encode('0x0102')

    u32_arr = ScaleDecoder.get_decoder_class('[u32; 3]')
    with self.assertRaises(ValueError):
        u32_arr.encode('0x0102')
def test_compact_bool_false(self):
    """The byte 0x00 decodes to boolean False."""
    bool_obj = ScaleDecoder.get_decoder_class('bool', ScaleBytes("0x00"))
    bool_obj.decode()
    self.assertEqual(False, bool_obj.value)
def test_era_immortal(self):
    """An immortal era decodes to '00' with no period or phase."""
    era_obj = ScaleDecoder.get_decoder_class('Era', ScaleBytes('0x00'))
    era_obj.decode()
    self.assertEqual('00', era_obj.value)
    self.assertIsNone(era_obj.period)
    self.assertIsNone(era_obj.phase)
def test_compact_bool_invalid(self):
    """A byte other than 0x00/0x01 is not a valid bool."""
    bool_obj = ScaleDecoder.get_decoder_class('bool', ScaleBytes("0x02"))
    with self.assertRaises(InvalidScaleTypeValueException):
        bool_obj.decode()
def test_compact_u32(self):
    """A four-byte compact decodes to the expected integer."""
    scale_obj = ScaleDecoder.get_decoder_class('Compact<u32>', ScaleBytes("0x02093d00"))
    scale_obj.decode()
    self.assertEqual(1000000, scale_obj.value)
def test_implied_struct(self):
    """A tuple of compacts decodes into an implied col1/col2 struct."""
    struct_obj = ScaleDecoder.get_decoder_class(
        '(Compact<u32>,Compact<u32>)', ScaleBytes("0x0c00"))
    struct_obj.decode()
    self.assertEqual(struct_obj.value, {"col1": 3, "col2": 0})
def test_compact_u32_remaining_bytes(self):
    """Leftover bytes after a decode raise RemainingScaleBytesNotEmptyException."""
    scale_obj = ScaleDecoder.get_decoder_class('Compact<u32>', ScaleBytes("0x02093d0001"))
    with self.assertRaises(RemainingScaleBytesNotEmptyException):
        scale_obj.decode()
def test_moment(self):
    """Compact<Moment> decodes to a datetime value."""
    moment_obj = ScaleDecoder.get_decoder_class('Compact<Moment>', ScaleBytes("0x03d68b655c"))
    moment_obj.decode()
    self.assertEqual(datetime.datetime(2019, 2, 14, 15, 40, 6), moment_obj.value)
def test_bitvec_decode_size_2bytes(self):
    """A 10-bit BitVec spanning two data bytes decodes correctly."""
    bitvec = ScaleDecoder.get_decoder_class('BitVec', ScaleBytes('0x28fd02'))
    bitvec.decode()
    expected_bits = [True, False, True, True, True, True, True, True, False, True]
    self.assertEqual(expected_bits, bitvec.value)
def test_balance(self):
    """Compact<Balance> decodes a big-integer balance value."""
    balance_obj = ScaleDecoder.get_decoder_class(
        'Compact<Balance>', ScaleBytes("0x130080cd103d71bc22"))
    balance_obj.decode()
    self.assertEqual(2503000000000000000, balance_obj.value)
def test_bitvec_encode_list4(self):
    """A 10-bit boolean list encodes to the expected two data bytes."""
    bitvec = ScaleDecoder.get_decoder_class('BitVec')
    bits = [True, False, True, True, True, True, True, True, False, True]
    self.assertEqual('0x28fd02', bitvec.encode(bits).to_hex())
def test_dynamic_fixed_array_type_encode_u8(self):
    """A one-element u8 array encodes from a hex string unchanged."""
    u8_arr = ScaleDecoder.get_decoder_class('[u8; 1]')
    self.assertEqual(str(u8_arr.encode('0x01')), '0x01')
def test_u16(self):
    """A little-endian u16 decodes to the expected integer."""
    u16_obj = ScaleDecoder.get_decoder_class('u16', ScaleBytes("0x2efb"))
    u16_obj.decode()
    self.assertEqual(64302, u16_obj.value)
def test_compact_u32_4bytes(self):
    """Encoding one million produces the four-byte compact form."""
    scale_obj = ScaleDecoder.get_decoder_class('Compact<u32>')
    scale_obj.encode(1000000)
    self.assertEqual("0x02093d00", str(scale_obj.data))