def handle(self, connection_id, message_content):
    """Feed gossiped blocks and batches into the completer.

    Dispatches on the GossipMessage content type:
      * BLOCK   -> parse and add the block
      * BATCH   -> parse and add the single batch
      * BATCHES -> parse a BatchList and add each member, tagged with
                   its (candidate_id, block_num, count) origin
    Always PASSes so downstream handlers still see the message.
    """
    msg = network_pb2.GossipMessage()
    msg.ParseFromString(message_content)
    content_type = msg.content_type
    if content_type == network_pb2.GossipMessage.BLOCK:
        blk = Block()
        blk.ParseFromString(msg.content)
        LOGGER.debug("CompleterGossipHandler: BLOCK=%s",
                     blk.header_signature[:8])
        self._completer.add_block(blk)
    elif content_type == network_pb2.GossipMessage.BATCH:
        incoming = Batch()
        incoming.ParseFromString(msg.content)
        LOGGER.debug("CompleterGossipHandler: NEW BATCH=%s ",
                     incoming.header_signature[:8])
        # the batch may have originated on another node
        self._completer.add_batch(incoming)
    elif content_type == network_pb2.GossipMessage.BATCHES:
        batch_list = BatchList()
        batch_list.ParseFromString(msg.content)
        candidate = batch_list.candidate_id.hex()
        total = len(batch_list.batches)
        block_num = batch_list.block_num
        for member in batch_list.batches:
            LOGGER.debug(" => NEW BATCH[%s]=%s candidate_id=%s.%s",
                         total, member.header_signature[:8],
                         block_num, candidate[:8])
            self._completer.add_batch(member, (candidate, block_num, total))
    return HandlerResult(status=HandlerStatus.PASS)
def _network_violation_result():
    """Build the standard close-and-respond result for a NETWORK-role
    authorization breach."""
    payload = AuthorizationViolation(violation=RoleType.Value("NETWORK"))
    return HandlerResult(
        HandlerStatus.RETURN_AND_CLOSE,
        message_out=payload,
        message_type=validator_pb2.Message.AUTHORIZATION_VIOLATION)
def handle(self, connection_id, message_content):
    """Verify permissions for every batch in an already-parsed submit
    request.

    Returns INVALID_BATCH if any batch fails off-chain role checks or
    its signer is not authorized; otherwise PASSes the message on.
    """
    response_proto = client_batch_submit_pb2.ClientBatchSubmitResponse
    LOGGER.debug("BatchListPermissionVerifier:handle .. ")

    def make_response(out_status):
        # Terminal response sent straight back to the client.
        return HandlerResult(
            status=HandlerStatus.RETURN,
            message_out=response_proto(status=out_status),
            message_type=Message.CLIENT_BATCH_SUBMIT_RESPONSE)

    for batch in message_content.batches:
        # BUG FIX: was "if batch.trace or True:", a debug leftover that
        # logged every batch; only explicitly traced batches should log.
        if batch.trace:
            LOGGER.debug("TRACE %s: %s", batch.header_signature,
                         self.__class__.__name__)
    if not all(
            self._verifier.check_off_chain_batch_roles(batch)
            for batch in message_content.batches):
        return make_response(response_proto.INVALID_BATCH)
    if not all(
            self._verifier.is_batch_signer_authorized(batch)
            for batch in message_content.batches):
        return make_response(response_proto.INVALID_BATCH)
    LOGGER.debug("BatchListPermissionVerifier:handle PASS")
    return HandlerResult(status=HandlerStatus.PASS)
def handle(self, connection_id, message_content):
    """Drop a PingResponse that no future is waiting on.

    Interconnect has already refreshed the connection's last-message
    timestamp by the time this runs, so there is nothing left to do.
    """
    return HandlerResult(HandlerStatus.DROP)
def handle(self, connection_id, message_content):
    """Register a transaction processor, substituting the default
    occupancy when the processor did not supply one, and ACK with OK."""
    reg = processor_pb2.TpRegisterRequest()
    reg.ParseFromString(message_content)
    occupancy = reg.max_occupancy
    if not occupancy:
        # proto3 default (0) means "not provided"
        occupancy = DEFAULT_MAX_OCCUPANCY
        LOGGER.warning(
            'Max occupancy was not provided by transaction processor: %s.'
            ' Using default max occupancy: %s',
            connection_id, DEFAULT_MAX_OCCUPANCY)
    LOGGER.info(
        'registered transaction processor: connection_id=%s, family=%s, '
        'version=%s, namespaces=%s, max_occupancy=%s',
        connection_id, reg.family, reg.version,
        list(reg.namespaces), occupancy)
    ptype = processor_manager.ProcessorType(reg.family, reg.version)
    self._collection[ptype] = processor_manager.Processor(
        connection_id, reg.namespaces, occupancy)
    ack = processor_pb2.TpRegisterResponse()
    ack.status = ack.OK
    return HandlerResult(
        status=HandlerStatus.RETURN,
        message_out=ack,
        message_type=validator_pb2.Message.TP_REGISTER_RESPONSE)
def handle(self, identity, message_content):
    """List state entries under an address prefix at a requested
    merkle root.

    NORESOURCE when the root is unknown or the prefix has no leaves;
    ERROR when the request fails to deserialize; OK with entries
    otherwise.
    """
    request = client_pb2.ClientStateListRequest()
    resp_proto = client_pb2.ClientStateListResponse
    status = resp_proto.OK
    try:
        request.ParseFromString(message_content)
        self._tree.set_merkle_root(request.merkle_root)
    except KeyError as e:
        # the requested merkle root is not in the tree
        status = resp_proto.NORESOURCE
        LOGGER.debug(e)
    except DecodeError:
        status = resp_proto.ERROR
        LOGGER.info(
            "Expected protobuf of class %s failed to "
            "deserialize", request)
    if status != resp_proto.OK:
        response = resp_proto(status=status)
    else:
        leaves = self._tree.leaves(request.prefix)
        # idiom fix: truthiness test instead of "len(leaves) == 0"
        if not leaves:
            response = resp_proto(status=resp_proto.NORESOURCE)
        else:
            entries = [Entry(address=a, data=v) for a, v in leaves.items()]
            response = resp_proto(status=status, entries=entries)
    return HandlerResult(
        status=HandlerStatus.RETURN,
        message_out=response,
        message_type=validator_pb2.Message.CLIENT_STATE_LIST_RESPONSE)
def handle(self, connection_id, message_content):
    """Register a transaction processor endpoint, replacing any
    existing registration for the same family/version, and ACK OK."""
    req = processor_pb2.TpRegisterRequest()
    req.ParseFromString(message_content)
    LOGGER.info(
        'registered transaction processor: connection_id=%s, family=%s, version=%s, namespaces=%s',
        connection_id[:8], req.family, req.version, list(req.namespaces))
    key = processor_iterator.ProcessorType(req.family, req.version)
    if key in self._collection:
        # re-registration simply overwrites the previous endpoint
        LOGGER.debug(
            'Already registered transaction processor:family=%s, version=%s, namespaces=%s',
            req.family, req.version, list(req.namespaces))
    self._collection[key] = processor_iterator.Processor(
        connection_id, req.namespaces)
    LOGGER.debug('All registered transaction processors=%s',
                 self._collection)
    ack = processor_pb2.TpRegisterResponse()
    ack.status = ack.OK
    return HandlerResult(
        status=HandlerStatus.RETURN,
        message_out=ack,
        message_type=validator_pb2.Message.TP_REGISTER_RESPONSE)
def handle(self, connection_id, message_content):
    """Validate a requested block's signature, de-duplicating via the
    seen cache.

    DROPs duplicates (counting them) and invalid blocks; PASSes fresh
    valid blocks after recording them as seen.
    """
    block_response_message = GossipBlockResponse()
    block_response_message.ParseFromString(message_content)
    block = Block()
    block.ParseFromString(block_response_message.content)
    if block.header_signature in self._seen_cache:
        self.block_dropped_count.inc()
        return HandlerResult(status=HandlerStatus.DROP)
    if not is_valid_block(block):
        LOGGER.debug("requested block's signature is invalid: %s",
                     block.header_signature)
        return HandlerResult(status=HandlerStatus.DROP)
    # BUG FIX: previously this replaced the whole cache with a fresh
    # TimedCache(), wiping every previously-seen entry and never
    # remembering this block. Record the signature instead, matching
    # the batch-response handler's behavior.
    self._seen_cache[block.header_signature] = None
    return HandlerResult(status=HandlerStatus.PASS)
def handle(self, connection_id, message_content):
    """Relay an internally-delivered batch to every connection that
    requested it — by batch id or by any contained transaction id —
    then clear the fulfilled requests."""
    batch, message_content = message_content
    recipients = self._responder.get_request(batch.header_signature)
    if recipients is None:
        recipients = []
    fulfilled = [batch.header_signature]
    for txn in batch.transactions:
        txn_waiters = self._responder.get_request(txn.header_signature)
        if txn_waiters is not None:
            recipients += txn_waiters
            fulfilled += [txn.header_signature]
    for connection in recipients:
        LOGGER.debug("Responding to batch requests: Send %s to %s",
                     batch.header_signature, connection)
        try:
            self._gossip.send(
                validator_pb2.Message.GOSSIP_BATCH_RESPONSE,
                message_content, connection)
        except ValueError:
            # a closed connection must not abort relaying to the rest
            LOGGER.debug(
                "Can't send batch response %s to closed "
                "connection %s", batch.header_signature, connection)
    for requested_id in fulfilled:
        self._responder.remove_request(requested_id)
    return HandlerResult(HandlerStatus.PASS)
def handle(self, connection_id, message_content):
    """Accept or reject a state-delta subscription depending on whether
    the subscriber's last-known blocks are recognized."""
    request = RegisterStateDeltaSubscriberRequest()
    request.ParseFromString(message_content)
    ack = RegisterStateDeltaSubscriberResponse()
    if not self._delta_processor.is_valid_subscription(
            request.last_known_block_ids):
        # none of the claimed blocks are known — reject outright
        ack.status = ack.UNKNOWN_BLOCK
        return HandlerResult(HandlerStatus.RETURN,
                             message_out=ack,
                             message_type=self._msg_type)
    ack.status = ack.OK
    # RETURN_AND_PASS: ack the client and let later handlers run too
    return HandlerResult(HandlerStatus.RETURN_AND_PASS,
                         message_out=ack,
                         message_type=self._msg_type)
def handle(self, connection_id, message_content):
    """PASS batch submissions unless the on-chain allowed-signing-keys
    setting exists and some batch's signer is not listed in it."""
    response_proto = client_pb2.ClientBatchSubmitResponse

    def make_response(out_status):
        return HandlerResult(
            status=HandlerStatus.RETURN,
            message_out=response_proto(status=out_status),
            message_type=Message.CLIENT_BATCH_SUBMIT_RESPONSE)

    allowed_pubkeys = None
    try:
        settings_view = self.settings_view_factory.create_settings_view(
            self.current_root_func())
        allowed_pubkeys = settings_view.get_setting(
            "sawtooth.validator.allowed_signing_keys")
    except AttributeError:
        # no chain head yet, so there is no setting to enforce
        LOGGER.debug("Chain head not yet set")
    if allowed_pubkeys is not None:
        try:
            request = client_pb2.ClientBatchSubmitRequest()
            request.ParseFromString(message_content)
            if not all(
                    is_batch_signer_authorized(batch, allowed_pubkeys)
                    for batch in request.batches):
                return make_response(response_proto.INVALID_BATCH)
        except DecodeError:
            return make_response(response_proto.INTERNAL_ERROR)
    return HandlerResult(status=HandlerStatus.PASS)
def handle(self, connection_id, message_content):
    """Answer a batch-by-id request from local stores when possible;
    otherwise remember the requester and re-broadcast the request to
    the other peers.
    """
    batch_request_message = network_pb2.GossipBatchByBatchIdRequest()
    batch_request_message.ParseFromString(message_content)
    # cleanup: removed a dead "batch = None" that was immediately
    # overwritten by the next line
    batch = self._responder.check_for_batch(batch_request_message.id)
    node_id = batch_request_message.node_id
    if batch is None:
        # No batch found, broadcast original message to other peers
        # and add to pending requests
        self._responder.add_request(batch_request_message.id, connection_id)
        self._gossip.broadcast(
            batch_request_message,
            validator_pb2.Message.GOSSIP_BATCH_BY_BATCH_ID_REQUEST,
            exclude=[connection_id])
    else:
        LOGGER.debug("Responding to batch requests %s",
                     batch.header_signature)
        batch_response = network_pb2.GossipBatchResponse(
            content=batch.SerializeToString(),
            node_id=node_id)
        self._gossip.send(validator_pb2.Message.GOSSIP_BATCH_RESPONSE,
                          batch_response.SerializeToString(),
                          connection_id)
    return HandlerResult(status=HandlerStatus.PASS)
def handle(self, connection_id, message_content):
    """Serve a gossip block request from local stores when possible;
    otherwise re-broadcast it and track who asked. A HEAD request with
    no chain head cannot be answered or usefully forwarded."""
    req = network_pb2.GossipBlockRequest()
    req.ParseFromString(message_content)
    found = self._responder.check_for_block(req.block_id)
    if found is not None:
        LOGGER.debug("Responding to block requests: %s",
                     found.get_block().header_signature)
        reply = network_pb2.GossipBlockResponse(
            content=found.get_block().SerializeToString(),
            node_id=req.node_id)
        self._gossip.send(validator_pb2.Message.GOSSIP_BLOCK_RESPONSE,
                          reply.SerializeToString(),
                          connection_id)
    elif req.block_id == "HEAD":
        LOGGER.debug("No chain head available. Cannot respond to block"
                     " requests.")
    else:
        # remember the requester, then fan the request out to peers
        self._responder.add_request(req.block_id, connection_id)
        self._gossip.broadcast(
            req,
            validator_pb2.Message.GOSSIP_BLOCK_REQUEST,
            exclude=[connection_id])
    return HandlerResult(status=HandlerStatus.PASS)
def handle(self, connection_id, message_content):
    """Queue every submitted batch with the completer and gossip each
    one onward; always PASS."""
    request = ClientBatchSubmitRequest()
    request.ParseFromString(message_content)
    for submitted in request.batches:
        self._completer.add_batch(submitted)
        self._gossip.broadcast_batch(submitted)
    return HandlerResult(status=HandlerStatus.PASS)
def handle(self, connection_id, message_content):
    """Relay a received batch response to every connection waiting on
    this batch (or on one of its transactions), then clear the
    fulfilled requests and purge stale ones.
    """
    batch_response_message = GossipBatchResponse()
    batch_response_message.ParseFromString(message_content)
    batch = batch_pb2.Batch()
    batch.ParseFromString(batch_response_message.content)
    open_request = self._responder.get_request(batch.header_signature)
    if open_request is None:
        open_request = []
    requests_to_remove = [batch.header_signature]
    for txn in batch.transactions:
        requests_by_txn = self._responder.get_request(txn.header_signature)
        if requests_by_txn is not None:
            open_request += requests_by_txn
            requests_to_remove += [txn.header_signature]
    for connection in open_request:
        LOGGER.debug("Responding to batch requests: Send %s to %s",
                     batch.header_signature, connection)
        # ROBUSTNESS FIX: the sibling responder handler guards this send;
        # without the guard one closed connection raised out of the loop,
        # skipping the remaining recipients and the request cleanup below.
        try:
            self._gossip.send(validator_pb2.Message.GOSSIP_BATCH_RESPONSE,
                              message_content,
                              connection)
        except ValueError:
            LOGGER.debug(
                "Can't send batch response %s to closed "
                "connection %s", batch.header_signature, connection)
    for requested_id in requests_to_remove:
        self._responder.remove_request(requested_id)
    self._responder.purge_requests()
    return HandlerResult(status=HandlerStatus.PASS)
def handle(self, connection_id, message_content):
    """PASS a requested batch only when its signature verifies; DROP
    it otherwise.
    """
    batch_response_message = GossipBatchResponse()
    batch_response_message.ParseFromString(message_content)
    batch = Batch()
    batch.ParseFromString(batch_response_message.content)
    # idiom fix: was "if status is True:" — rely on truthiness rather
    # than identity comparison against the True singleton
    if validate_batch(batch):
        LOGGER.debug("requested batch passes signature verification %s",
                     batch.header_signature)
        return HandlerResult(status=HandlerStatus.PASS)
    LOGGER.debug("requested batch's signature is invalid: %s",
                 batch.header_signature)
    return HandlerResult(status=HandlerStatus.DROP)
def handle(self, identity, message_content):
    """Fetch a single block by id from the block store.

    ERROR when the request fails to deserialize, NORESOURCE when the
    block id is unknown, OK with the block otherwise.
    """
    request = client_pb2.ClientBlockGetRequest()
    resp_proto = client_pb2.ClientBlockGetResponse
    status = resp_proto.OK
    block = None
    try:
        request.ParseFromString(message_content)
        block = self._block_store[request.block_id].block.get_block()
    except DecodeError:
        LOGGER.info(
            "Expected protobuf of class %s failed to "
            "deserialize", request)
        status = resp_proto.ERROR
    except KeyError as e:
        # BUG FIX: previously read NORESOURCE off
        # ClientStateListResponse — the wrong response proto for a
        # block-get request; use this handler's own response enum.
        status = resp_proto.NORESOURCE
        LOGGER.info(e)
    if status != resp_proto.OK:
        response = resp_proto(status=status)
    else:
        response = resp_proto(status=status, block=block)
    return HandlerResult(
        status=HandlerStatus.RETURN,
        message_out=response,
        message_type=validator_pb2.Message.CLIENT_BLOCK_GET_RESPONSE)
def handle(self, connection_id, message_content):
    """PASS a requested block only when its signature verifies; DROP
    it otherwise.
    """
    block_response_message = GossipBlockResponse()
    block_response_message.ParseFromString(message_content)
    block = Block()
    block.ParseFromString(block_response_message.content)
    # idiom fix: was "if status is True:" — rely on truthiness rather
    # than identity comparison against the True singleton
    if validate_block(block):
        LOGGER.debug("requested block passes signature verification %s",
                     block.header_signature)
        return HandlerResult(status=HandlerStatus.PASS)
    LOGGER.debug("requested block's signature is invalid: %s",
                 block.header_signature)
    return HandlerResult(status=HandlerStatus.DROP)
def handle(self, connection_id, message_content):
    """Deserialize a batch submit request and verify its permissions.

    Responds INVALID_BATCH at the first batch failing either the
    off-chain role check or signer authorization, INTERNAL_ERROR on a
    malformed request, and PASSes otherwise.
    """
    response_proto = client_pb2.ClientBatchSubmitResponse

    def respond(out_status):
        return HandlerResult(
            status=HandlerStatus.RETURN,
            message_out=response_proto(status=out_status),
            message_type=Message.CLIENT_BATCH_SUBMIT_RESPONSE)

    request = client_pb2.ClientBatchSubmitRequest()
    try:
        request.ParseFromString(message_content)
        for batch in request.batches:
            if batch.trace:
                LOGGER.debug("TRACE %s: %s", batch.header_signature,
                             self.__class__.__name__)
        for batch in request.batches:
            if not self._verifier.check_off_chain_batch_roles(batch):
                return respond(response_proto.INVALID_BATCH)
        for batch in request.batches:
            if not self._verifier.is_batch_signer_authorized(batch):
                return respond(response_proto.INVALID_BATCH)
    except DecodeError:
        return respond(response_proto.INTERNAL_ERROR)
    return HandlerResult(status=HandlerStatus.PASS)
def handle(self, connection_id, message_content):
    """Answer a batch-by-id request, de-duplicating by request nonce.

    If the batch is held locally, respond directly. Otherwise record
    the requester and — unless the batch was already requested —
    decrement the TTL and re-broadcast to other peers.
    """
    batch_request_message = network_pb2.GossipBatchByBatchIdRequest()
    batch_request_message.ParseFromString(message_content)
    if batch_request_message.nonce in self._seen_requests:
        LOGGER.debug("Received repeat GossipBatchByBatchIdRequest from %s",
                     connection_id)
        return HandlerResult(HandlerStatus.DROP)
    # cleanup: removed a dead "batch = None" that was immediately
    # overwritten by the next line
    batch = self._responder.check_for_batch(batch_request_message.id)
    if batch is None:
        # No batch found, broadcast original message to other peers
        # and add to pending requests
        if not self._responder.already_requested(batch_request_message.id):
            if batch_request_message.time_to_live > 0:
                time_to_live = batch_request_message.time_to_live
                batch_request_message.time_to_live = time_to_live - 1
                self._gossip.broadcast(
                    batch_request_message,
                    validator_pb2.Message.GOSSIP_BATCH_BY_BATCH_ID_REQUEST,
                    exclude=[connection_id])
                self._seen_requests[batch_request_message.nonce] = \
                    batch_request_message.id
                self._responder.add_request(batch_request_message.id,
                                            connection_id)
        else:
            LOGGER.debug("Batch %s has already been requested",
                         batch_request_message.id)
            self._responder.add_request(batch_request_message.id,
                                        connection_id)
    else:
        LOGGER.debug("Responding to batch requests %s",
                     batch.header_signature)
        batch_response = network_pb2.GossipBatchResponse(
            content=batch.SerializeToString(),
        )
        self._gossip.send(validator_pb2.Message.GOSSIP_BATCH_RESPONSE,
                          batch_response.SerializeToString(),
                          connection_id)
    return HandlerResult(HandlerStatus.PASS)
def handle(self, connection_id, message_content):
    """Drop duplicate or invalid batch responses; PASS fresh valid
    ones after recording them in the seen cache."""
    response = GossipBatchResponse()
    response.ParseFromString(message_content)
    batch = Batch()
    batch.ParseFromString(response.content)
    sig = batch.header_signature
    if sig in self._seen_cache:
        self._batch_dropped_count.inc()
        return HandlerResult(status=HandlerStatus.DROP)
    if not is_valid_batch(batch):
        LOGGER.debug("requested batch's signature is invalid: %s", sig)
        return HandlerResult(status=HandlerStatus.DROP)
    self._seen_cache[sig] = None
    return HandlerResult(status=HandlerStatus.PASS)
def handle(self, connection_id, message_content):
    """Subscribe a connection to validator events.

    Builds EventSubscription objects from the request (rejecting with
    INVALID_FILTER if any filter fails to construct), resolves the
    subscriber's most recent known block (rejecting with UNKNOWN_BLOCK
    if none is recognized), then registers the subscriber and acks OK.
    """
    request = ClientEventsSubscribeRequest()
    request.ParseFromString(message_content)
    ack = ClientEventsSubscribeResponse()
    try:
        # Filter construction is where validation happens; a bad
        # filter raises InvalidFilterError.
        subscriptions = [
            EventSubscription(
                event_type=sub.event_type,
                filters=[
                    self._filter_factory.create(f.key, f.match_string,
                                                f.filter_type)
                    for f in sub.filters
                ],
            )
            for sub in request.subscriptions
        ]
    except InvalidFilterError as err:
        LOGGER.warning("Invalid Filter Error: %s", err)
        ack.status = ack.INVALID_FILTER
        ack.response_message = str(err)
        return HandlerResult(HandlerStatus.RETURN,
                             message_out=ack,
                             message_type=self._msg_type)
    last_known_block_ids = list(request.last_known_block_ids)
    # None means "no catch-up point": subscriber starts from now.
    last_known_block_id = None
    if last_known_block_ids:
        try:
            last_known_block_id = \
                self._event_broadcaster.get_latest_known_block_id(
                    last_known_block_ids)
        except NoKnownBlockError as err:
            # none of the claimed blocks exist on this validator
            ack.status = ack.UNKNOWN_BLOCK
            ack.response_message = str(err)
            return HandlerResult(HandlerStatus.RETURN,
                                 message_out=ack,
                                 message_type=self._msg_type)
    self._event_broadcaster.add_subscriber(connection_id, subscriptions,
                                           last_known_block_id)
    ack.status = ack.OK
    # RETURN_AND_PASS: ack the client and let later handlers also run
    return HandlerResult(HandlerStatus.RETURN_AND_PASS,
                         message_out=ack,
                         message_type=self._msg_type)
def handle(self, connection_id, message_content):
    """ACK a gossiped batch. If the batch is already held locally and
    nobody is waiting on it, just RETURN the ack; otherwise also PASS
    so downstream handlers process it."""
    incoming_batch, _ = message_content
    batch_id = incoming_batch.header_signature
    ack = NetworkAcknowledgement()
    ack.status = ack.OK
    # short-circuit order preserved: only probe the store when there
    # are no open requests for this batch
    if not self._has_open_requests(batch_id) and self._has_batch(batch_id):
        return HandlerResult(
            HandlerStatus.RETURN,
            message_out=ack,
            message_type=validator_pb2.Message.NETWORK_ACK)
    return HandlerResult(HandlerStatus.RETURN_AND_PASS,
                         message_out=ack,
                         message_type=validator_pb2.Message.NETWORK_ACK)
def handle(self, connection_id, message_content):
    """Route an already-deserialized gossip payload to the completer
    via a content-type dispatch table; always PASS."""
    payload, tag, _ = message_content
    dispatch = {
        network_pb2.GossipMessage.BLOCK: self._completer.add_block,
        network_pb2.GossipMessage.BATCH: self._completer.add_batch,
    }
    submit = dispatch.get(tag)
    if submit is not None:
        submit(payload)
    return HandlerResult(status=HandlerStatus.PASS)
def handle(self, connection_id, message_content):
    """Trace-log, complete, and gossip each batch of an already-parsed
    submit request; always PASS."""
    for submitted in message_content.batches:
        if submitted.trace:
            LOGGER.debug("TRACE %s: %s", submitted.header_signature,
                         self.__class__.__name__)
        self._completer.add_batch(submitted)
        self._gossip.broadcast_batch(submitted)
    return HandlerResult(status=HandlerStatus.PASS)
def handle(self, connection_id, message_content):
    """Look up the status of each requested block and notify the
    consensus engine of every Valid/Invalid result; PASS afterwards.
    """
    request = consensus_pb2.ConsensusCheckBlocksRequest()
    try:
        request.ParseFromString(message_content)
    except DecodeError:
        LOGGER.exception("Unable to decode ConsensusCheckBlocksRequest")
        # BUG FIX: was HandlerResult(status=HandlerResult.DROP) — the
        # DROP enum lives on HandlerStatus; the old code raised
        # AttributeError instead of dropping the message.
        return HandlerResult(status=HandlerStatus.DROP)
    block_statuses = self._proxy.get_block_statuses(request.block_ids)
    for (block_id, block_status) in block_statuses:
        if block_status == BlockStatus.Valid:
            self._consensus_notifier.notify_block_valid(block_id)
        elif block_status == BlockStatus.Invalid:
            self._consensus_notifier.notify_block_invalid(block_id)
    return HandlerResult(status=HandlerStatus.PASS)
def handle(self, connection_id, message_content):
    """Signature-check an already-deserialized gossip payload.

    Duplicates (per the seen cache) and invalid signatures are
    DROPped; fresh valid blocks/batches are recorded and PASSed.
    Unrecognized content types are DROPped.
    """
    payload, tag, _ = message_content
    if tag == GossipMessage.BLOCK:
        sig = payload.header_signature
        if sig in self._seen_cache:
            self._block_dropped_count.inc()
            return HandlerResult(status=HandlerStatus.DROP)
        if not is_valid_block(payload):
            LOGGER.debug("block signature is invalid: %s", sig)
            return HandlerResult(status=HandlerStatus.DROP)
        self._seen_cache[sig] = None
        return HandlerResult(status=HandlerStatus.PASS)
    if tag == GossipMessage.BATCH:
        sig = payload.header_signature
        if sig in self._seen_cache:
            self._batch_dropped_count.inc()
            return HandlerResult(status=HandlerStatus.DROP)
        if not is_valid_batch(payload):
            LOGGER.debug("batch signature is invalid: %s", sig)
            return HandlerResult(status=HandlerStatus.DROP)
        self._seen_cache[sig] = None
        return HandlerResult(status=HandlerStatus.PASS)
    # no valid content_type — drop the message
    return HandlerResult(status=HandlerStatus.DROP)
def handle(self, connection_id, message_content):
    """Best-effort catch-up of an event subscriber, then (re)enable it.

    Catch-up failures are logged and tolerated — the subscriber is
    enabled regardless so it receives events going forward.
    """
    try:
        self._event_broadcaster.catchup_subscriber(connection_id)
    except (PossibleForkDetectedError, NoKnownBlockError,
            KeyError) as catchup_err:
        LOGGER.warning("Failed to catchup subscriber: %s", catchup_err)
    self._event_broadcaster.enable_subscriber(connection_id)
    return HandlerResult(HandlerStatus.PASS)
def handle(self, connection_id, message_content):
    """Hand a received gossip block response straight to the
    completer; always PASS."""
    envelope = network_pb2.GossipBlockResponse()
    envelope.ParseFromString(message_content)
    received = Block()
    received.ParseFromString(envelope.content)
    self._completer.add_block(received)
    return HandlerResult(status=HandlerStatus.PASS)
def handle(self, connection_id, message_content):
    """Hand a received gossip batch response straight to the
    completer; always PASS."""
    envelope = network_pb2.GossipBatchResponse()
    envelope.ParseFromString(message_content)
    received = Batch()
    received.ParseFromString(envelope.content)
    self._completer.add_batch(received)
    return HandlerResult(status=HandlerStatus.PASS)