def on_get(self, req, resp):
    """Report harvester status.

    Responds with the harvester head (highest harvested block id) and the
    queue of missing block-id ranges still to be processed, or a waiting
    message when no block has been harvested yet.

    Fix: ``resp.status`` was previously set only on the else branch; it is
    now set explicitly on both paths (200 is also Falcon's default, so the
    observable response is unchanged).
    """
    # Highest block id harvested so far, if any.
    last_known_block = Block.query(self.session).order_by(Block.id.desc()).first()

    resp.status = falcon.HTTP_200

    if not last_known_block:
        resp.media = {
            'status': 'success',
            'data': {
                'message': 'Harvester waiting for first run'
            }
        }
    else:
        # Ranges of block ids missing between already-harvested blocks.
        remaining_sets_result = Block.get_missing_block_ids(self.session)

        resp.media = {
            'status': 'success',
            'data': {
                'harvester_head': last_known_block.id,
                'block_process_queue': [
                    {'from': block_set['block_from'], 'to': block_set['block_to']}
                    for block_set in remaining_sets_result
                ]
            }
        }
def start_harvester(self, check_gaps=False):
    """Start the harvester: optionally backfill gaps, start the sequencer,
    then accumulate from the current chain head.

    :param check_gaps: when True, queue accumulation tasks for every
        missing block-id range before continuing from the head.
    :return: dict with a result message, the queued block sets and the
        sequencer task id.
    """
    substrate = SubstrateInterface(
        url=SUBSTRATE_RPC_URL,
        type_registry_preset=settings.TYPE_REGISTRY,
        runtime_config=RuntimeConfiguration(),
    )

    queued_sets = []

    if check_gaps:
        # Backfill: queue one accumulation task per missing id range.
        for gap in Block.get_missing_block_ids(self.session):
            # NOTE(review): the lower bound becomes the *end* hash and the
            # upper bound the *start* hash — presumably the recursive
            # accumulator walks backwards; preserved as-is.
            gap_end_hash = substrate.get_block_hash(int(gap['block_from']))
            gap_start_hash = substrate.get_block_hash(int(gap['block_to']))

            accumulate_block_recursive.delay(gap_start_hash, gap_end_hash)

            queued_sets.append({
                'start_block_hash': gap_start_hash,
                'end_block_hash': gap_end_hash,
            })

    # Kick off the sequencer task.
    sequencer_task = start_sequencer.delay()

    # Continue from the current head; finalised head when configured.
    head_hash = (
        substrate.get_chain_finalised_head()
        if FINALIZATION_ONLY == 1
        else substrate.get_chain_head()
    )

    # No end hash: accumulate open-ended from the head.
    accumulate_block_recursive.delay(head_hash, None)
    queued_sets.append({
        'start_block_hash': head_hash,
        'end_block_hash': None,
    })

    return {
        'result': 'Harvester job started',
        'block_sets': queued_sets,
        'sequencer_task_id': sequencer_task.task_id,
    }
def start_harvester(self, check_gaps=False):
    """Start the harvester: optionally backfill gaps, start the sequencer,
    then accumulate from the current chain head.

    Fixes: removed a leftover debug ``print`` of ``check_gaps``; corrected
    a comment that claimed the *finalised* head while the code calls
    ``get_chain_head()`` (the non-finalised head).

    :param check_gaps: when True, queue accumulation tasks for every
        missing block-id range before continuing from the head.
    :return: dict with a result message and the queued block sets.
    """
    substrate = SubstrateInterface(SUBSTRATE_RPC_URL)

    block_sets = []

    if check_gaps:
        # Check for gaps between already harvested blocks and try to fill them first
        remaining_sets_result = Block.get_missing_block_ids(self.session)

        for block_set in remaining_sets_result:
            # Get start and end block hash.
            # NOTE(review): the lower bound maps to the *end* hash and the
            # upper bound to the *start* hash — presumably the recursive
            # accumulator walks backwards; preserved as-is.
            end_block_hash = substrate.get_block_hash(int(block_set['block_from']))
            start_block_hash = substrate.get_block_hash(int(block_set['block_to']))

            # Start processing task
            accumulate_block_recursive.delay(start_block_hash, end_block_hash)

            block_sets.append({
                'start_block_hash': start_block_hash,
                'end_block_hash': end_block_hash
            })

    # Start sequencer
    start_sequencer.delay()

    # Continue from the current chain head (not the finalised head).
    start_block_hash = substrate.get_chain_head()
    end_block_hash = None

    # Open-ended accumulation from the head.
    accumulate_block_recursive.delay(start_block_hash, end_block_hash)

    block_sets.append({
        'start_block_hash': start_block_hash,
        'end_block_hash': end_block_hash
    })

    return {'result': 'Harvester job started', 'block_sets': block_sets}