def _generate_block_indexes_from_scratch() -> None:
    client = _get_redisearch_index_client(Indexes.block.value)
    client.create_index(
        [
            redisearch.NumericField("block_id", sortable=True),
            redisearch.NumericField("prev_id", sortable=True),
            redisearch.NumericField("timestamp", sortable=True),
        ]
    )
    _log.info("Listing all blocks in storage")
    block_paths = storage.list_objects("BLOCK/")
    pattern = re.compile(r"BLOCK/[0-9]+$")
    for block_path in block_paths:
        if re.search(pattern, block_path):
            _log.info(f"Adding index for {block_path}")
            raw_block = storage.get_json_from_object(block_path)
            block = cast("model.BlockModel", None)  # Will always be assigned by one of the branches below
            if LEVEL == "1":
                block = l1_block_model.new_from_stripped_block(raw_block)
            elif LEVEL == "2":
                block = l2_block_model.new_from_at_rest(raw_block)
            elif LEVEL == "3":
                block = l3_block_model.new_from_at_rest(raw_block)
            elif LEVEL == "4":
                block = l4_block_model.new_from_at_rest(raw_block)
            elif LEVEL == "5":
                block = l5_block_model.new_from_at_rest(raw_block)
            put_document(Indexes.block.value, block.block_id, block.export_as_search_index())
def process_receipt_v1(block_dto: Dict[str, Any]) -> None:
    if not block_dto:
        raise exceptions.ValidationException("block_dto missing")
    _log.info(f"[RECEIPT] Got receipt from L{block_dto['header']['level']}: {block_dto}")
    block_model = cast("model.BlockModel", None)  # This will always get defined, or it will raise
    level_received_from: int = block_dto["header"]["level"]
    if level_received_from == 2:
        block_model = l2_block_model.new_from_at_rest(block_dto)
    elif level_received_from == 3:
        block_model = l3_block_model.new_from_at_rest(block_dto)
    elif level_received_from == 4:
        block_model = l4_block_model.new_from_at_rest(block_dto)
    elif level_received_from == 5:
        block_model = l5_block_model.new_from_at_rest(block_dto)
    else:
        raise exceptions.InvalidNodeLevel("Unsupported level receipt")

    _log.info(f"Block model {block_model.__dict__}")
    l1_block_id_set = block_model.get_associated_l1_block_id()

    _log.info(f"Processing receipt for blocks {l1_block_id_set} from L{level_received_from}")
    for l1_block_id in l1_block_id_set:
        # Check that the chain which sent this receipt is in our claims, and that this L1 block is accepting receipts for this level
        validations = matchmaking.get_claim_check(l1_block_id)["validations"][f"l{level_received_from}"]
        if (block_model.dc_id in validations) and broadcast_functions.is_block_accepting_verifications_from_level(l1_block_id, level_received_from):
            _log.info(f"Verified that block {l1_block_id} was sent. Inserting receipt")
            storage_location = broadcast_functions.verification_storage_location(l1_block_id, level_received_from, block_model.dc_id)
            storage.put_object_as_json(storage_location, block_model.export_as_at_rest())
            # Set new receipt for matchmaking claim check
            try:
                block_id = block_model.block_id
                proof = block_model.proof
                dc_id = block_model.dc_id
                matchmaking.add_receipt(l1_block_id, level_received_from, dc_id, block_id, proof)
            except Exception:
                _log.exception("matchmaking add_receipt failed!")
            # Update the broadcast system about this receipt
            broadcast_functions.set_receieved_verification_for_block_from_chain_sync(l1_block_id, level_received_from, block_model.dc_id)
        else:
            _log.warning(
                f"Chain {block_model.dc_id} (level {level_received_from}) returned a receipt that wasn't expected (possibly expired?) for block {l1_block_id}. Rejecting receipt"  # noqa: B950
            )
            raise exceptions.NotAcceptingVerifications(f"Not accepting verifications for block {l1_block_id} from {block_model.dc_id}")
def _generate_block_indexes() -> None:
    client = _get_redisearch_index_client(Indexes.block.value)
    try:
        client.create_index(
            [
                redisearch.NumericField("block_id", sortable=True),
                redisearch.NumericField("prev_id", sortable=True),
                redisearch.NumericField("timestamp", sortable=True),
            ]
        )
    except redis.exceptions.ResponseError as e:
        if not str(e).startswith("Index already exists"):  # We don't care if the index already exists
            raise
    _log.info("Listing all blocks in storage")
    block_paths = storage.list_objects("BLOCK/")
    pattern = re.compile(r"BLOCK/[0-9]+$")
    for block_path in block_paths:
        if re.search(pattern, block_path):
            # Check whether this block was already marked as indexed
            if not client.redis.sismember(BLOCK_MIGRATION_KEY, block_path):
                _log.info(f"Adding index for {block_path}")
                raw_block = storage.get_json_from_object(block_path)
                block = cast("model.BlockModel", None)
                if LEVEL == "1":
                    block = l1_block_model.new_from_stripped_block(raw_block)
                elif LEVEL == "2":
                    block = l2_block_model.new_from_at_rest(raw_block)
                elif LEVEL == "3":
                    block = l3_block_model.new_from_at_rest(raw_block)
                elif LEVEL == "4":
                    block = l4_block_model.new_from_at_rest(raw_block)
                elif LEVEL == "5":
                    block = l5_block_model.new_from_at_rest(raw_block)
                put_document(Indexes.block.value, block.block_id, block.export_as_search_index())
                # Mark this block as indexed so an interrupted migration can skip it on re-run
                client.redis.sadd(BLOCK_MIGRATION_KEY, block_path)
            else:
                _log.info(f"Skipping already indexed block {block_path}")
def test_create_from_at_rest(self):
    first_block = create_l4_block()
    second_block = l4_block_model.new_from_at_rest(first_block.export_as_at_rest())
    self.assertDictEqual(first_block.__dict__, second_block.__dict__)