def _get_last_block(self, node_interface: SubstrateInterface) -> BlockNumber:
    """Fetch the height of the chain from the given node.

    May raise:
    - RemoteError if the node cannot be reached or returns bad data
    """
    log.debug(f'{self.chain} querying last block', url=node_interface.url)
    try:
        chain_head_hash = node_interface.get_chain_head()
        last_block = node_interface.get_block_number(block_hash=chain_head_hash)
        if last_block is None:
            # Rarely a node responds with None for the last block; treat it as a failure
            raise SubstrateRequestException(
                f'{self.chain} node failed to request last block. Returned None',
            )
    except (
            requests.exceptions.RequestException,
            SubstrateRequestException,
            WebSocketException,
            ValueError,
    ) as e:
        msg = (
            f'{self.chain} failed to request last block '
            f'at endpoint: {node_interface.url} due to: {str(e)}.'
        )
        log.error(msg)
        raise RemoteError(msg) from e

    log.debug(f'{self.chain} last block', last_block=last_block)
    return BlockNumber(last_block)
def on_get(self, req, resp):
    """Report harvester/sequencer progress alongside the node's chain heads."""
    sequencer_task = Status.get_status(self.session, 'SEQUENCER_TASK_ID')
    integrity_head = Status.get_status(self.session, 'INTEGRITY_HEAD')
    sequencer_head = self.session.query(func.max(BlockTotal.id)).one()[0]

    # Highest harvested block stored in the database, if any
    highest_block_id = self.session.query(func.max(Block.id)).one()[0]
    best_block = Block.query(self.session).filter_by(id=highest_block_id).first()

    best_block_datetime = None
    best_block_nr = None
    if best_block:
        best_block_nr = best_block.id
        # Epoch milliseconds, forcing UTC on the (naive) stored datetime
        best_block_datetime = best_block.datetime.replace(tzinfo=pytz.UTC).timestamp() * 1000

    # Live heads straight from the node
    substrate = SubstrateInterface(SUBSTRATE_RPC_URL)
    chain_head_block_id = substrate.get_block_number(substrate.get_chain_head())
    chain_finalized_block_id = substrate.get_block_number(substrate.get_chain_finalised_head())

    resp.media = {
        'best_block_datetime': best_block_datetime,
        'best_block_nr': best_block_nr,
        'sequencer_task': sequencer_task.value,
        'sequencer_head': sequencer_head,
        'integrity_head': int(integrity_head.value),
        'chain_head_block_id': chain_head_block_id,
        'chain_finalized_block_id': chain_finalized_block_id
    }
def _get_last_block(self, node_interface: SubstrateInterface) -> BlockNumber:
    """Fetch the current chain height via the given node interface."""
    log.debug(f'{self.chain} querying last block', url=node_interface.url)
    try:
        block_number = node_interface.get_block_number(
            block_hash=node_interface.get_chain_head(),
        )
    except (
            requests.exceptions.RequestException,
            SubstrateRequestException,
            # TODO: drop TypeError once py-substrate-interface `get_block_number`
            # copes with a None response. ValueError stays in case
            # `get_chain_head` returns None.
            # https://github.com/polkascan/py-substrate-interface/issues/68
            TypeError,
            ValueError,
    ) as e:
        msg = (
            f'{self.chain} failed to request last block '
            f'at endpoint: {node_interface.url} due to: {str(e)}.'
        )
        log.error(msg)
        raise RemoteError(msg) from e

    log.debug(f'{self.chain} last block', last_block=block_number)
    return BlockNumber(block_number)
def start_harvester(self, check_gaps=False):
    """Queue harvesting tasks, optionally back-filling detected gaps first.

    Returns a summary dict with the queued block ranges and the sequencer
    task id.
    """
    substrate = SubstrateInterface(
        url=SUBSTRATE_RPC_URL,
        type_registry_preset=settings.TYPE_REGISTRY,
        runtime_config=RuntimeConfiguration(),
    )
    block_sets = []

    if check_gaps:
        # Queue one catch-up task per gap between already harvested blocks
        for gap in Block.get_missing_block_ids(self.session):
            end_block_hash = substrate.get_block_hash(int(gap['block_from']))
            start_block_hash = substrate.get_block_hash(int(gap['block_to']))
            accumulate_block_recursive.delay(start_block_hash, end_block_hash)
            block_sets.append({
                'start_block_hash': start_block_hash,
                'end_block_hash': end_block_hash
            })

    # Kick off the sequencer task
    sequencer_task = start_sequencer.delay()

    # Continue from the current head; honour finalization-only mode
    start_block_hash = (
        substrate.get_chain_finalised_head()
        if FINALIZATION_ONLY == 1
        else substrate.get_chain_head()
    )
    end_block_hash = None
    accumulate_block_recursive.delay(start_block_hash, end_block_hash)
    block_sets.append({
        'start_block_hash': start_block_hash,
        'end_block_hash': end_block_hash
    })

    return {
        'result': 'Harvester job started',
        'block_sets': block_sets,
        'sequencer_task_id': sequencer_task.task_id
    }
def on_get(self, req, resp):
    """Serve aggregate network statistics: heads, extrinsic and validator counts."""
    substrate = SubstrateInterface(
        url=SUBSTRATE_RPC_URL,
        address_type=SUBSTRATE_ADDRESS_TYPE,
        type_registry_preset=TYPE_REGISTRY,
    )
    resp.status = falcon.HTTP_200

    # Best and finalised heads straight from the node
    head_hash = substrate.get_chain_head()
    head_number = substrate.get_block_number(head_hash)
    finalised_head_hash = substrate.get_chain_finalised_head()
    finalised_head_number = substrate.get_block_number(finalised_head_hash)

    extrinsic_count = Extrinsic.query(self.session).count()

    # Current validator set; fall back to empty when the storage item is absent
    try:
        validators = substrate.get_runtime_state(
            module="Session",
            storage_function="Validators",
            params=[],
            block_hash=head_hash,
        ).get('result', [])
    except StorageFunctionNotFound:
        validators = []

    # Configured validator slots; fall back to 0 when the storage item is absent
    try:
        validator_count = substrate.get_runtime_state(
            module="Staking",
            storage_function="ValidatorCount",
            params=[],
            block_hash=head_hash,
        ).get('result', 0)
    except StorageFunctionNotFound:
        validator_count = 0

    transfers_count = Extrinsic.query(self.session).filter(
        and_(Extrinsic.module_id == 'balances', Extrinsic.call_id == 'transfer')
    ).count()

    resp.media = {
        'status': 'success',
        'data': {
            'blockNumber': head_number,
            'finalizedBlockNumber': finalised_head_number,
            'extrinsics': extrinsic_count,
            'currValidators': len(validators),
            'validators': validator_count,
            'transfersCount': transfers_count
        }
    }
def start_harvester(self, check_gaps=False):
    """Queue harvesting tasks from the current chain head, optionally
    back-filling detected gaps first. Returns a summary dict of queued ranges.
    """
    print("---------- {}".format(check_gaps))
    substrate = SubstrateInterface(SUBSTRATE_RPC_URL)
    block_sets = []

    if check_gaps:
        # Queue one catch-up task per gap between already harvested blocks
        for gap in Block.get_missing_block_ids(self.session):
            end_block_hash = substrate.get_block_hash(int(gap['block_from']))
            start_block_hash = substrate.get_block_hash(int(gap['block_to']))
            accumulate_block_recursive.delay(start_block_hash, end_block_hash)
            block_sets.append({
                'start_block_hash': start_block_hash,
                'end_block_hash': end_block_hash
            })

    # Kick off the sequencer task
    start_sequencer.delay()

    # Harvest forward from the current (best, not finalised) chain head
    start_block_hash = substrate.get_chain_head()
    end_block_hash = None
    accumulate_block_recursive.delay(start_block_hash, end_block_hash)
    block_sets.append({
        'start_block_hash': start_block_hash,
        'end_block_hash': end_block_hash
    })

    return {'result': 'Harvester job started', 'block_sets': block_sets}
def _get_last_block(self, node_interface: SubstrateInterface) -> BlockNumber:
    """Return the chain height as reported by the given node."""
    log.debug(f'{self.chain} querying last block', url=node_interface.url)
    try:
        head_hash = node_interface.get_chain_head()
        height = node_interface.get_block_number(block_hash=head_hash)
    except (
            requests.exceptions.RequestException,
            SubstrateRequestException,
            WebSocketException,
    ) as e:
        err_msg = (
            f'{self.chain} failed to request last block '
            f'at endpoint: {node_interface.url} due to: {str(e)}.'
        )
        log.error(err_msg)
        raise RemoteError(err_msg) from e

    log.debug(f'{self.chain} last block', last_block=height)
    return BlockNumber(height)
def on_post(self, req, resp):
    """Process runtime metadata for the current head, once per spec version."""
    resp.status = falcon.HTTP_200

    substrate = SubstrateInterface(SUBSTRATE_RPC_URL)
    head_hash = substrate.get_chain_head()
    substrate.init_runtime(head_hash)

    # Guard clause: this runtime version was already processed
    runtime = Runtime.query(self.session).get(substrate.runtime_version)
    if runtime:
        return

    harvester = PolkascanHarvesterService(
        db_session=self.session,
        type_registry=TYPE_REGISTRY,
        type_registry_file=TYPE_REGISTRY_FILE,
    )
    harvester.process_metadata(substrate.runtime_version, head_hash)
    self.session.commit()

    resp.media = {'status': 'success', 'data': {}}
class JSONRPCResource(BaseResource):
    """JSON-RPC endpoint that extends a Substrate node's RPC surface.

    Requests whose ``method`` is in ``self.methods`` are handled locally
    (runtime metadata introspection, extrinsic composition, keypair helpers,
    custom type-registry management backed by the cache region); anything
    else is passed through verbatim to the node via ``rpc_request``.
    """

    def __init__(self, cache_region):
        # Cache region (Redis per the original comment) holding the
        # user-managed custom type registry.
        self.cache_region = cache_region

        # Check for custom types in Redis
        self.substrate = None
        custom_type_registry = self.cache_region.get('CUSTOM_TYPE_REGISTRY')
        self.init_type_registry(custom_type_registry)

        # Per-request state, reset at the start of each on_post call.
        self.block_hash = None
        self.metadata_decoder = None
        self.runtime_version = None
        self.metadata_cache = {}

        # Methods handled locally; everything else is proxied to the node.
        self.methods = [
            'rpc_methods',
            'runtime_decodeScale',
            'runtime_encodeScale',
            'runtime_getMetadata',
            'runtime_getMetadataModules',
            'runtime_getMetadataCallFunctions',
            'runtime_getMetadataCallFunction',
            'runtime_getMetadataEvents',
            'runtime_getMetadataEvent',
            'runtime_getMetadataConstants',
            'runtime_getMetadataConstant',
            'runtime_getMetadataStorageFunctions',
            'runtime_getMetadataStorageFunction',
            'runtime_getMetadataErrors',
            'runtime_getMetadataError',
            'runtime_getState',
            'runtime_getTypeRegistry',
            'runtime_getType',
            'runtime_getCustomTypes',
            'runtime_addCustomType',
            'runtime_setCustomTypes',
            'runtime_removeCustomType',
            'runtime_resetCustomTypes',
            'runtime_getBlock',
            'runtime_createSignaturePayload',
            'runtime_createExternalSignerPayload',
            'runtime_createExtrinsic',
            'runtime_submitExtrinsic',
            'runtime_getPaymentInfo',
            'keypair_create',
            'keypair_inspect',
            'keypair_sign',
            'keypair_verify'
        ]

    def get_request_param(self, params):
        """Pop and return the next positional parameter; raise ValueError when exhausted."""
        try:
            return params.pop(0)
        except IndexError:
            raise ValueError("Not enough parameters provided")

    def init_type_registry(self, custom_type_registry=None):
        """(Re)create the SubstrateInterface, layering the custom registry on top."""
        if settings.TYPE_REGISTRY_FILE:
            type_registry = load_type_registry_file(settings.TYPE_REGISTRY_FILE)
        else:
            type_registry = {}

        if custom_type_registry:
            type_registry.update(custom_type_registry)

        # NOTE(review): the merged `type_registry` built above is not passed
        # here — only `custom_type_registry` is; confirm whether that is
        # intentional.
        self.substrate = SubstrateInterface(
            url=settings.SUBSTRATE_RPC_URL,
            ss58_format=settings.SUBSTRATE_ADDRESS_TYPE,
            type_registry_preset=settings.TYPE_REGISTRY,
            type_registry=custom_type_registry
        )

        if settings.DEBUG:
            print('Custom types at init: ', custom_type_registry)
            self.substrate.debug = True

    def init_request(self, params=None):
        """Consume an optional trailing block-hash parameter; integers are
        treated as block numbers and resolved to a hash."""
        if params:
            self.block_hash = self.get_request_param(params)
            if type(self.block_hash) is int:
                self.block_hash = self.substrate.get_block_hash(self.block_hash)

    def on_post(self, req, resp):
        """Dispatch a JSON-RPC request: validate the envelope, then either
        handle the method locally or proxy it to the Substrate node."""
        self.block_hash = None
        self.metadata_decoder = None
        self.runtime_version = None
        self.substrate.request_id = req.media.get('id')

        method = req.media.get('method')
        params = req.media.get('params', [])

        # Check request requirements
        if not req.media.get('jsonrpc'):
            resp.media = {
                "error": {
                    "code": -32600,
                    "message": "Unsupported JSON-RPC protocol version"
                },
                "id": req.media.get('id')
            }
        elif not method:
            resp.media = {
                "error": {
                    "code": -32601,
                    "message": "Method not found"
                },
                "id": req.media.get('id')
            }
        elif method not in self.methods:
            # Default pass through request to Substrate RPC
            resp.media = self.substrate.rpc_request(method, params)
        else:
            resp.status = falcon.HTTP_200

            try:
                # Process methods
                if method == 'runtime_getBlock':
                    self.init_request(params)

                    block = self.substrate.get_block(block_hash=self.block_hash)
                    if block:
                        # Unwrap SCALE objects into plain values for JSON output
                        block['extrinsics'] = [extrinsic.value for extrinsic in block['extrinsics']]
                        block['header']["digest"]["logs"] = [log.value for log in block['header']["digest"]["logs"]]

                    response = {
                        "jsonrpc": "2.0",
                        "result": block,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getState':
                    # Init params
                    storage_params = None

                    # Process params
                    module = self.get_request_param(params)
                    storage_function = self.get_request_param(params)
                    if params:
                        storage_params = self.get_request_param(params)

                    self.init_request(params)

                    # Get response
                    obj = self.substrate.query(
                        module=module,
                        storage_function=storage_function,
                        params=storage_params,
                        block_hash=self.block_hash
                    )
                    response = {'result': obj.value if obj else None}

                elif method == 'runtime_getMetadata':
                    # Process params
                    self.init_request(params)

                    # Get response
                    response = self.substrate.get_runtime_metadata(block_hash=self.block_hash)

                elif method in ['runtime_createSignaturePayload', 'runtime_createExternalSignerPayload']:
                    account = self.get_request_param(params)
                    call_module = self.get_request_param(params)
                    call_function = self.get_request_param(params)
                    call_params = self.get_request_param(params)
                    tip = self.get_request_param(params) or 0
                    era = self.get_request_param(params)

                    self.init_request(params)

                    try:
                        # Create call
                        call = self.substrate.compose_call(
                            call_module=call_module,
                            call_function=call_function,
                            call_params=call_params,
                            block_hash=self.block_hash
                        )

                        nonce = self.substrate.get_account_nonce(account) or 0

                        if isinstance(era, dict) and 'current' not in era and 'phase' not in era:
                            # Retrieve current block id so a mortal era can be anchored
                            era['current'] = self.substrate.get_block_number(self.substrate.get_chain_head())

                        # External signers expect the call length prepended
                        if method == 'runtime_createExternalSignerPayload':
                            include_call_length = True
                        else:
                            include_call_length = False

                        # Generate signature payload
                        signature_payload = self.substrate.generate_signature_payload(
                            call=call,
                            nonce=nonce,
                            tip=tip,
                            era=era,
                            include_call_length=include_call_length
                        )

                        response = {
                            "jsonrpc": "2.0",
                            "result": {
                                'signature_payload': str(signature_payload),
                                'nonce': nonce,
                                'era': era
                            },
                            "id": req.media.get('id')
                        }
                    except ValueError as e:
                        response = {
                            "jsonrpc": "2.0",
                            "error": {
                                "code": -999,
                                "message": str(e)
                            },
                            "id": req.media.get('id')
                        }

                elif method in ['runtime_submitExtrinsic', 'runtime_createExtrinsic']:
                    account = self.get_request_param(params)
                    call_module = self.get_request_param(params)
                    call_function = self.get_request_param(params)
                    call_params = self.get_request_param(params)
                    tip = self.get_request_param(params) or 0
                    era = self.get_request_param(params)
                    crypto_type = int(self.get_request_param(params) or 1)
                    signature = self.get_request_param(params)

                    self.init_request(params)

                    try:
                        # Create call
                        call = self.substrate.compose_call(
                            call_module=call_module,
                            call_function=call_function,
                            call_params=call_params,
                            block_hash=self.block_hash
                        )

                        nonce = self.substrate.get_account_nonce(account) or 0

                        # Create keypair with only the public key given in the request;
                        # the signature itself is supplied by the caller.
                        keypair = Keypair(ss58_address=account, crypto_type=crypto_type)

                        if isinstance(era, dict) and 'current' in era:
                            era['current'] = int(era['current'])

                        # Create extrinsic
                        extrinsic = self.substrate.create_signed_extrinsic(
                            call=call,
                            keypair=keypair,
                            nonce=nonce,
                            signature=signature,
                            tip=tip,
                            era=era
                        )

                        if method == 'runtime_createExtrinsic':
                            # Only serialize; do not broadcast
                            result = str(extrinsic.data)
                        else:
                            # Submit extrinsic to the node
                            extrinsic_result = self.substrate.submit_extrinsic(
                                extrinsic=extrinsic
                            )
                            result = {
                                "extrinsic_hash": extrinsic_result.extrinsic_hash,
                                "block_hash": extrinsic_result.block_hash,
                                "finalized": extrinsic_result.finalized,
                            }

                        response = {
                            "jsonrpc": "2.0",
                            "result": result,
                            "id": req.media.get('id')
                        }
                    except ValueError as e:
                        response = {
                            "jsonrpc": "2.0",
                            "error": {
                                "code": -999,
                                "message": str(e)
                            },
                            "id": req.media.get('id')
                        }
                    except SubstrateRequestException as e:
                        response = {
                            "jsonrpc": "2.0",
                            "error": e.args[0],
                            "id": req.media.get('id')
                        }

                elif method == 'runtime_getPaymentInfo':
                    account = self.get_request_param(params)
                    call_module = self.get_request_param(params)
                    call_function = self.get_request_param(params)
                    call_params = self.get_request_param(params)

                    # Create call
                    call = self.substrate.compose_call(
                        call_module=call_module,
                        call_function=call_function,
                        call_params=call_params
                    )

                    # Create keypair with only the public key given in the request
                    keypair = Keypair(ss58_address=account)

                    response = {
                        "jsonrpc": "2.0",
                        "result": self.substrate.get_payment_info(call=call, keypair=keypair),
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataModules':
                    self.init_request(params)

                    response = {
                        "jsonrpc": "2.0",
                        "result": self.substrate.get_metadata_modules(block_hash=self.block_hash),
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataCallFunctions':
                    self.init_request(params)

                    call_list = self.substrate.get_metadata_call_functions(block_hash=self.block_hash)

                    response = {
                        "jsonrpc": "2.0",
                        "result": call_list,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataCallFunction':
                    param_call_module = self.get_request_param(params)
                    param_call_module_function = self.get_request_param(params)
                    self.init_request(params)

                    result = self.substrate.get_metadata_call_function(
                        module_name=param_call_module,
                        call_function_name=param_call_module_function,
                        block_hash=self.block_hash
                    )

                    response = {
                        "jsonrpc": "2.0",
                        "result": result.value,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataEvents':
                    self.init_request(params)

                    event_list = self.substrate.get_metadata_events(block_hash=self.block_hash)

                    response = {
                        "jsonrpc": "2.0",
                        "result": event_list,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataEvent':
                    param_call_module = self.get_request_param(params)
                    param_call_module_event = self.get_request_param(params)
                    self.init_request(params)

                    result = self.substrate.get_metadata_event(
                        module_name=param_call_module,
                        event_name=param_call_module_event,
                        block_hash=self.block_hash
                    )

                    response = {
                        "jsonrpc": "2.0",
                        "result": result,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataConstants':
                    self.init_request(params)

                    constant_list = self.substrate.get_metadata_constants(block_hash=self.block_hash)

                    response = {
                        "jsonrpc": "2.0",
                        "result": constant_list,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataConstant':
                    module_name = self.get_request_param(params)
                    constant_name = self.get_request_param(params)
                    self.init_request(params)

                    result = self.substrate.get_metadata_constant(
                        module_name=module_name,
                        constant_name=constant_name,
                        block_hash=self.block_hash
                    )

                    response = {
                        "jsonrpc": "2.0",
                        "result": result,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataStorageFunctions':
                    self.init_request(params)

                    storage_list = self.substrate.get_metadata_storage_functions(block_hash=self.block_hash)

                    response = {
                        "jsonrpc": "2.0",
                        "result": storage_list,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataStorageFunction':
                    module_name = self.get_request_param(params)
                    storage_name = self.get_request_param(params)
                    self.init_request(params)

                    result = self.substrate.get_metadata_storage_function(
                        module_name=module_name,
                        storage_name=storage_name,
                        block_hash=self.block_hash
                    )

                    response = {
                        "jsonrpc": "2.0",
                        "result": result.value,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataErrors':
                    self.init_request(params)

                    error_list = self.substrate.get_metadata_errors(block_hash=self.block_hash)

                    response = {
                        "jsonrpc": "2.0",
                        "result": error_list,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getMetadataError':
                    module_name = self.get_request_param(params)
                    error_name = self.get_request_param(params)
                    self.init_request(params)

                    result = self.substrate.get_metadata_error(
                        module_name=module_name,
                        error_name=error_name,
                        block_hash=self.block_hash
                    )

                    response = {
                        "jsonrpc": "2.0",
                        "result": result,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getTypeRegistry':
                    self.init_request(params)

                    result = self.substrate.get_type_registry(block_hash=self.block_hash)

                    if result:
                        result = list(result.values())

                    response = {
                        "jsonrpc": "2.0",
                        "result": result,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getType':
                    type_string = self.get_request_param(params)
                    self.init_request(params)

                    response = {
                        "jsonrpc": "2.0",
                        "result": self.substrate.get_type_definition(type_string, block_hash=self.block_hash),
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_addCustomType':
                    type_string = self.get_request_param(params)
                    type_definition = self.get_request_param(params)

                    # Retrieve current custom type registry
                    custom_type_registry = self.cache_region.get('CUSTOM_TYPE_REGISTRY')

                    if not custom_type_registry:
                        custom_type_registry = {
                            'types': {
                            }
                        }

                    custom_type_registry['types'][type_string] = type_definition

                    # TODO Try to decode given type definition

                    # Store updated custom type registry
                    self.cache_region.set('CUSTOM_TYPE_REGISTRY', custom_type_registry)

                    if settings.DEBUG:
                        print('Custom types updated to: ', custom_type_registry)

                    # Update runtime configuration
                    RuntimeConfiguration().update_type_registry(custom_type_registry)

                    response = {
                        "jsonrpc": "2.0",
                        "result": "Type registry updated",
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_setCustomTypes':
                    custom_types = self.get_request_param(params)

                    if type(custom_types) is not dict:
                        raise ValueError('custom types must be in format: {"type_string": "type_definition"}')

                    custom_type_registry = {
                        'types': custom_types
                    }

                    # Store updated custom type registry
                    self.cache_region.set('CUSTOM_TYPE_REGISTRY', custom_type_registry)

                    # Reset runtime configuration
                    RuntimeConfiguration().clear_type_registry()
                    self.init_type_registry(custom_type_registry)

                    if settings.DEBUG:
                        print('Custom types updated to: ', custom_type_registry)

                    response = {
                        "jsonrpc": "2.0",
                        "result": "Type registry updated",
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_resetCustomTypes':
                    custom_type_registry = None

                    # Store updated custom type registry
                    self.cache_region.set('CUSTOM_TYPE_REGISTRY', custom_type_registry)

                    # Reset runtime configuration
                    RuntimeConfiguration().clear_type_registry()
                    self.init_type_registry()

                    if settings.DEBUG:
                        print('Custom types cleared')

                    response = {
                        "jsonrpc": "2.0",
                        "result": "Custom types cleared",
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_removeCustomType':
                    type_string = self.get_request_param(params)

                    # Retrieve current custom type registry
                    custom_type_registry = self.cache_region.get('CUSTOM_TYPE_REGISTRY')

                    if custom_type_registry and type_string in custom_type_registry.get('types', {}):
                        del custom_type_registry['types'][type_string]

                        # Store updated custom type registry
                        self.cache_region.set('CUSTOM_TYPE_REGISTRY', custom_type_registry)

                        # Reset runtime configuration
                        RuntimeConfiguration().clear_type_registry()
                        self.init_type_registry(custom_type_registry)

                        result = '"{}" removed from custom type registry'.format(type_string)
                    else:
                        result = '"{}" not found in custom type registry'.format(type_string)

                    response = {
                        "jsonrpc": "2.0",
                        "result": result,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_getCustomTypes':
                    custom_type_registry = self.cache_region.get('CUSTOM_TYPE_REGISTRY')

                    if custom_type_registry:
                        result = custom_type_registry.get('types')
                    else:
                        result = {}

                    response = {
                        "jsonrpc": "2.0",
                        "result": result,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_decodeScale':
                    type_string = self.get_request_param(params)
                    scale_hex_bytes = self.get_request_param(params)
                    self.init_request(params)

                    result = self.substrate.decode_scale(
                        type_string=type_string,
                        scale_bytes=scale_hex_bytes,
                        block_hash=self.block_hash
                    )

                    response = {
                        "jsonrpc": "2.0",
                        "result": result,
                        "id": req.media.get('id')
                    }

                elif method == 'runtime_encodeScale':
                    type_string = self.get_request_param(params)
                    value = self.get_request_param(params)
                    self.init_request(params)

                    result = self.substrate.encode_scale(
                        type_string=type_string,
                        value=value,
                        block_hash=self.block_hash
                    )

                    response = {
                        "jsonrpc": "2.0",
                        "result": result,
                        "id": req.media.get('id')
                    }

                elif method == 'keypair_create':
                    word_count = self.get_request_param(params) or 0
                    crypto_type = int(self.get_request_param(params) or 1)

                    mnemonic = Keypair.generate_mnemonic(word_count)

                    keypair = Keypair.create_from_mnemonic(
                        mnemonic=mnemonic,
                        ss58_format=settings.SUBSTRATE_ADDRESS_TYPE,
                        crypto_type=crypto_type
                    )

                    response = {
                        "jsonrpc": "2.0",
                        "result": {
                            'ss58_address': keypair.ss58_address,
                            'public_key': keypair.public_key,
                            'private_key': keypair.private_key,
                            'mnemonic': keypair.mnemonic,
                        },
                        "id": req.media.get('id')
                    }

                elif method == 'keypair_inspect':
                    mnemonic = self.get_request_param(params)
                    crypto_type = int(self.get_request_param(params) or 1)

                    keypair = Keypair.create_from_mnemonic(
                        mnemonic=mnemonic,
                        ss58_format=settings.SUBSTRATE_ADDRESS_TYPE,
                        crypto_type=crypto_type
                    )

                    response = {
                        "jsonrpc": "2.0",
                        "result": {
                            'ss58_address': keypair.ss58_address,
                            'public_key': keypair.public_key,
                            'private_key': keypair.private_key,
                            'mnemonic': keypair.mnemonic,
                        },
                        "id": req.media.get('id')
                    }

                elif method == 'keypair_sign':
                    mnemonic = self.get_request_param(params)
                    data = self.get_request_param(params)
                    crypto_type = int(self.get_request_param(params) or 1)

                    keypair = Keypair.create_from_mnemonic(
                        mnemonic=mnemonic,
                        ss58_format=settings.SUBSTRATE_ADDRESS_TYPE,
                        crypto_type=crypto_type
                    )
                    signature = keypair.sign(data)

                    response = {
                        "jsonrpc": "2.0",
                        "result": {'signature': signature},
                        "id": req.media.get('id')
                    }

                elif method == 'keypair_verify':
                    account_address = self.get_request_param(params)
                    data = self.get_request_param(params)
                    signature = self.get_request_param(params)
                    crypto_type = int(self.get_request_param(params) or 1)

                    keypair = Keypair(
                        ss58_address=account_address,
                        ss58_format=settings.SUBSTRATE_ADDRESS_TYPE,
                        crypto_type=crypto_type
                    )
                    result = keypair.verify(data, signature)

                    response = {
                        "jsonrpc": "2.0",
                        "result": {'verified': result},
                        "id": req.media.get('id')
                    }

                elif method == 'rpc_methods':
                    response = self.substrate.rpc_request(method, params)

                    # Add additional implemented method
                    response['result']['methods'] = sorted(response['result']['methods'] + self.methods)

                else:
                    raise NotImplementedError('Method \'{}\' not implemented yet'.format(method))
            except (ValueError, NotImplementedError) as e:
                response = {
                    "error": {
                        "code": -999,
                        "message": str(e)
                    },
                    "id": req.media.get('id')
                }
            except (InvalidScaleTypeValueException, RemainingScaleBytesNotEmptyException) as e:
                response = {
                    "error": {
                        "code": -998,
                        "message": "Decoding error, given SCALE-value or type registry might be invalid "
                    },
                    "id": req.media.get('id')
                }

            resp.media = response
def start_harvester(self, check_gaps=False, shard=None):
    """Harvest up to 10 new blocks for one shard, continuing after the
    highest block already stored for that shard.

    NOTE(review): the ``shard`` keyword is immediately overwritten by
    ``self.request.args[0]`` (the raw Celery task argument), so the
    parameter is effectively positional-only — confirm callers pass it
    that way.

    Returns a summary dict, or an early "waiting" result while the init
    task has not yet stored enough genesis blocks.

    Raises:
        HarvesterNotshardParamsError: no shard argument was supplied.
        HarvesterCouldNotAddBlock: an unexpected error occurred while
            adding a block.
    """
    shard = self.request.args[0]
    if shard is None:
        raise HarvesterNotshardParamsError('params shard is missing.. stopping harvester ')
    print("start_harvester")

    substrate_url = SHARDS_TABLE[shard]
    print('== start_harvester substrate_url {} =='.format(substrate_url))
    substrate = SubstrateInterface(substrate_url)

    # Wait until the init task has stored the genesis block of all 4 shards
    n = Block.query(self.session).filter_by(bid=1).count()
    if n < 4:
        print('waiting init task completed! count().n: {} '.format(n))
        return {'result': 'waiting init task completed! '}

    block_sets = []
    harvester = PolkascanHarvesterService(self.session, type_registry=TYPE_REGISTRY)
    harvester.metadata_store = self.metadata_store

    start_block_hash = substrate.get_chain_head()
    end_block_hash = None
    r = 10
    block_nr = substrate.get_block_number(start_block_hash)
    max_block = Block.query(self.session).filter_by(
        shard_num=shard.split(".")[1]).order_by(Block.bid.desc()).first()
    print('start block_nr {} =='.format(block_nr))
    print('start max_block {} =='.format(max_block.bid))

    # Shrink the batch when fewer than 10 new blocks exist on chain
    if block_nr - max_block.bid < 10:
        r = block_nr - max_block.bid
        print('current range r: {} =='.format(max_block.bid))

    # BUG FIX: keep block_hash defined so the except handlers below cannot
    # raise NameError when the very first get_block_hash call fails.
    block_hash = None
    try:
        for nr in range(1, r + 1):
            block_hash = substrate.get_block_hash(max_block.bid + nr)
            if harvester.add_block(block_hash, substrate_url):
                print('start_harvester+ Added {} '.format(block_hash))
                self.session.commit()
        # Update persistent metadata store in Celery task
        self.metadata_store = harvester.metadata_store
    except BlockAlreadyAdded as e:
        print('. Skipped {} '.format(block_hash))
    except IntegrityError as e:
        print('. Skipped duplicate {}=={} '.format(block_hash, e))
    except Exception as exc:
        print('! ERROR adding {}'.format(block_hash))
        raise HarvesterCouldNotAddBlock(block_hash) from exc

    block_sets.append({
        'start_block_hash': start_block_hash,
        'end_block_hash': end_block_hash
    })

    # BUG FIX: the original return dict listed the 'result' key twice, so the
    # first value ('Yee data Synchronization job SUCCESS') was silently
    # dropped; only the effective entry is kept.
    return {
        'block_sets': block_sets,
        'result': 'Synch data from {} to {} blocks check by shardnum of {}'.format(
            max_block.bid + 1, r + max_block.bid + 1, shard)
    }
def integrity_checks(self):
    """Walk stored blocks up to the finalized head, verifying parent-hash
    linkage, and advance the persisted INTEGRITY_HEAD marker.

    Raises BlockIntegrityError when a block is missing or fails the
    parent-hash check (after re-adding the offending pair from the node).
    Returns ``{'integrity_head': <last verified block id>}``.
    """
    # 1. Check finalized head
    substrate = SubstrateInterface(settings.SUBSTRATE_RPC_URL)

    if settings.FINALIZATION_BY_BLOCK_CONFIRMATIONS > 0:
        # Emulate finality by trailing the best head by N confirmations
        finalized_block_hash = substrate.get_chain_head()
        finalized_block_number = max(
            substrate.get_block_number(finalized_block_hash) - settings.FINALIZATION_BY_BLOCK_CONFIRMATIONS, 0
        )
    else:
        finalized_block_hash = substrate.get_chain_finalised_head()
        finalized_block_number = substrate.get_block_number(finalized_block_hash)

    # 2. Check integrity head
    integrity_head = Status.get_status(self.db_session, 'INTEGRITY_HEAD')

    if not integrity_head.value:
        # Only continue if block #1 exists
        if Block.query(self.db_session).filter_by(id=1).count() == 0:
            raise BlockIntegrityError('Chain not at genesis')
        integrity_head.value = 0
    else:
        integrity_head.value = int(integrity_head.value)

    # Re-check from one block before the last verified head
    start_block_id = max(integrity_head.value - 1, 0)
    end_block_id = finalized_block_number
    chunk_size = 1000
    parent_block = None

    if start_block_id < end_block_id:
        # Continue integrity check
        # print('== Start integrity checks from {} to {} =='.format(start_block_id, end_block_id))

        for block_nr in range(start_block_id, end_block_id, chunk_size):
            # TODO replace limit with filter_by block range
            block_range = Block.query(self.db_session).order_by('id')[block_nr:block_nr + chunk_size]
            for block in block_range:
                if parent_block:
                    if block.id != parent_block.id + 1:
                        # Gap detected between stored blocks.
                        # Save integrity head if block hash of parent matches with hash in node
                        if parent_block.hash == substrate.get_block_hash(integrity_head.value):
                            integrity_head.save(self.db_session)
                            self.db_session.commit()
                        raise BlockIntegrityError('Block #{} is missing.. stopping check '.format(parent_block.id + 1))
                    elif block.parent_hash != parent_block.hash:
                        # Fork/reorg detected: replace both blocks with the
                        # canonical versions from the node, then abort.
                        self.process_reorg_block(parent_block)
                        self.process_reorg_block(block)
                        self.remove_block(block.hash)
                        self.remove_block(parent_block.hash)
                        self.db_session.commit()
                        self.add_block(substrate.get_block_hash(block.id))
                        self.add_block(substrate.get_block_hash(parent_block.id))
                        self.db_session.commit()

                        integrity_head.value = parent_block.id - 1

                        # Save integrity head if block hash of parent matches with hash in node
                        #if parent_block.parent_hash == substrate.get_block_hash(integrity_head.value):
                        integrity_head.save(self.db_session)
                        self.db_session.commit()
                        raise BlockIntegrityError('Block #{} failed integrity checks, Re-adding #{}.. '.format(parent_block.id, block.id))
                    else:
                        # Chain link verified; advance the head
                        integrity_head.value = block.id

                parent_block = block

                if block.id == end_block_id:
                    break

        if parent_block:
            # Persist the head only when it matches the node's view
            if parent_block.hash == substrate.get_block_hash(int(integrity_head.value)):
                integrity_head.save(self.db_session)
                self.db_session.commit()

    return {'integrity_head': integrity_head.value}
import json

# Load the chain's custom SCALE type definitions.
# BUG FIX: the file handle returned by open() was never closed; a context
# manager releases it deterministically.
with open("../assets/types.json", "r") as types_file:
    types = json.load(types_file)

# url = "http://127.0.0.1:9933"
url = "https://rpc-testnet.acria.network"

# Connect with the custom types layered on top of the node-template preset.
s = SubstrateInterface(url=url, ss58_format=42, type_registry=types, type_registry_preset='substrate-node-template')

# Basic chain info (each attribute access triggers an RPC request)
s.chain, s.version, s.properties
s.token_symbol, s.token_decimals
s.get_chain_head()

## for becoming a validator
# s.rpc_request('author_rotateKeys', None)

## totalSupply
s.query('Balances', 'TotalIssuance')

## current metadata
s.get_runtime_metadata()

## send 100 tokens from Alice -> Bob
from substrateinterface import Keypair
alice = Keypair.create_from_uri('//Alice')
bob = Keypair.create_from_uri('//Bob')
class Kuka:
    """ROS node driving a KUKA arm that logs joint states, pins the log to
    IPFS, and records the IPFS hash on a Substrate chain (Datalog pallet).
    Work starts when a paid NewLaunch chain event addressed to this robot's
    account is observed.
    """

    def __init__(self):
        rospy.init_node('listener', anonymous=False)
        rospack = rospkg.RosPack()
        rospack.list()
        self.path = rospack.get_path('kuka_controller')
        # Config file provides the node URL and the robot's mnemonic
        with open(f"{self.path}/config/config", "r") as f:
            config = json.load(f)
        self.client = ipfshttpclient.connect()
        # Chain connection with the custom types used by the Datalog/Launch pallets
        self.substrate = SubstrateInterface(
            url=config["node"],
            ss58_format=32,
            type_registry_preset="substrate-node-template",
            type_registry={
                "types": {
                    "Record": "Vec<u8>",
                    "Parameter": "Bool",
                    "LaunchParameter": "Bool",
                    "<T as frame_system::Config>::AccountId": "AccountId",
                    "RingBufferItem": {
                        "type": "struct",
                        "type_mapping": [
                            ["timestamp", "Compact<u64>"],
                            ["payload", "Vec<u8>"],
                        ],
                    },
                    "RingBufferIndex": {
                        "type": "struct",
                        "type_mapping": [
                            ["start", "Compact<u64>"],
                            ["end", "Compact<u64>"],
                        ],
                    }
                }
            },
        )
        mnemonic = config["kuka_mnemonic"]
        self.keypair = Keypair.create_from_mnemonic(mnemonic, ss58_format=32)

    # Call service move_arm
    def move_arm_client(self, desired_xyz, duration):
        """Call the move_arm ROS service; returns its response, or None on failure."""
        rospy.wait_for_service('move_arm')
        try:
            move_arm = rospy.ServiceProxy('move_arm', MoveArm)
            resp = move_arm(desired_xyz, duration)
            return resp
        except rospy.ServiceException as e:
            rospy.loginfo("Service call failed: %s" % e)

    # Write data to a file
    def listener(self, data):
        """Joint-state subscriber callback: append samples to the open log,
        starting a new line each time the wall-clock second changes."""
        if self.write:
            times_prev = self.times
            self.times = int(time.time())
            if self.times != times_prev:
                #print('write')
                self.logs.write('\n')
            self.logs.write(str(data))

    def write_datalog(self, data):
        """Record `data` (an IPFS hash) on chain via the Datalog pallet and
        wait for inclusion."""
        call = self.substrate.compose_call(call_module="Datalog", call_function="record", call_params={'record': data})
        extrinsic = self.substrate.create_signed_extrinsic(call=call, keypair=self.keypair)
        receipt = self.substrate.submit_extrinsic(extrinsic, wait_for_inclusion=True)
        rospy.loginfo(
            f"Datalog created with extrinsic hash: {receipt.extrinsic_hash}")

    # Print circle
    def circle(self):
        """Trace a half-circle with the arm while logging joint states, then
        pin the log to IPFS and record its hash on chain."""
        rospy.loginfo("Work paid. Starting work...")
        t = 0
        self.logs = open(f'{self.path}/data.txt', 'w')
        # Move to the start position first
        self.move_arm_client(
            [Float64(0.3), Float64(0.3), Float64(0.6)], Float64(2.0))
        self.times = 0
        self.write = True
        rospy.Subscriber('/manipulator/joint_states', JointState, self.listener)
        # Sweep t over [0, pi]: half circle of radius 0.3 in the x-z plane
        while t <= math.pi:
            x = 0.3 * math.cos(t)
            z = 0.3 * math.sin(t) + 0.6
            t += 0.2
            #print(x, z)
            self.move_arm_client(
                [Float64(x), Float64(0.3), Float64(z)], Float64(0.05))
        self.write = False
        rospy.loginfo("Work done")
        self.logs.close()
        res = self.client.add(f'{self.path}/data.txt')
        rospy.loginfo(f"Data pinned to IPFS with hash {res['Hash']}")
        self.write_datalog(res['Hash'])
        rospy.loginfo(f"Wait for payment")

    def subscription_handler(self, obj, update_nr, subscription_id):
        """Per-block callback: start work when a NewLaunch event addressed to
        this robot's account has its launch parameter set to True."""
        ch = self.substrate.get_chain_head()
        chain_events = self.substrate.get_events(ch)
        for ce in chain_events:
            # if ce.value["event_id"] == "NewLaunch":
            #     print(ce.params)
            if ce.value["event_id"] == "NewLaunch" and ce.params[1]["value"] == self.keypair.ss58_address \
                    and ce.params[2]["value"] is True:  # yes/no
                print(f"\"ON\" launch command from employer.")
                self.circle()

    def spin(self):
        """Block forever, invoking subscription_handler on every new block header."""
        rospy.loginfo(f"Wait for payment")
        self.substrate.subscribe_block_headers(self.subscription_handler)