def handle(self, connection_id, message_content):
    """Register a transaction processor connection.

    Parses a TpRegisterRequest from *message_content*, records the
    processor in the shared collection keyed by its
    (family, version, encoding) type, and acknowledges with an OK
    TpRegisterResponse.
    """
    request = processor_pb2.TpRegisterRequest()
    request.ParseFromString(message_content)

    LOGGER.info(
        'registered transaction processor: connection_id=%s, family=%s, '
        'version=%s, encoding=%s, namespaces=%s',
        connection_id,
        request.family,
        request.version,
        request.encoding,
        request.namespaces)

    # Index the processor by its declared type so the executor can route
    # matching transactions to this connection.
    registered_type = processor_iterator.ProcessorType(
        request.family, request.version, request.encoding)
    self._collection[registered_type] = processor_iterator.Processor(
        connection_id, request.namespaces)

    ack = processor_pb2.TpRegisterResponse()
    ack.status = ack.OK

    return HandlerResult(
        message_out=ack,
        message_type=validator_pb2.Message.TP_REGISTER_RESPONSE,
        status=HandlerStatus.RETURN)
def handle(self, connection_id, message_content):
    """Register a transaction processor connection.

    Parses a TpRegisterRequest, stores the processor keyed by its
    (family, version) type — replacing and logging any previous
    registration of the same type — and replies with an OK ack.
    """
    request = processor_pb2.TpRegisterRequest()
    request.ParseFromString(message_content)

    LOGGER.info(
        'registered transaction processor: connection_id=%s, family=%s, version=%s, namespaces=%s',
        connection_id[:8],
        request.family,
        request.version,
        list(request.namespaces))

    registered_type = processor_iterator.ProcessorType(
        request.family, request.version)

    # A re-registration of the same type silently replaces the old entry;
    # note it at debug level for troubleshooting.
    if registered_type in self._collection:
        LOGGER.debug(
            'Already registered transaction processor:family=%s, version=%s, namespaces=%s',
            request.family,
            request.version,
            list(request.namespaces))

    self._collection[registered_type] = processor_iterator.Processor(
        connection_id, request.namespaces)
    LOGGER.debug('All registered transaction processors=%s',
                 self._collection)

    ack = processor_pb2.TpRegisterResponse()
    ack.status = ack.OK

    return HandlerResult(
        message_out=ack,
        message_type=validator_pb2.Message.TP_REGISTER_RESPONSE,
        status=HandlerStatus.RETURN)
def _future_done_callback(self, request, result):
    """Handle the TpProcessResponse for a previously sent process request.

    :param request (bytes):the serialized request
    :param result (FutureResult): the processor's response envelope

    On OK the transaction is marked valid; on INTERNAL_ERROR the request
    is re-dispatched (or queued to wait for) a processor of the same
    type; any other status invalidates the transaction and drops its
    context.
    """
    req = processor_pb2.TpProcessRequest()
    req.ParseFromString(request)

    response = processor_pb2.TpProcessResponse()
    response.ParseFromString(result.content)

    # Fix: guard the deletion. The future may already have been removed
    # (e.g. the processor disconnected and its futures were cleaned up),
    # in which case an unconditional del raises KeyError in the callback.
    if result.connection_id in self._open_futures and \
            req.signature in self._open_futures[result.connection_id]:
        del self._open_futures[result.connection_id][req.signature]

    if response.status == processor_pb2.TpProcessResponse.OK:
        self._scheduler.set_transaction_execution_result(
            req.signature, True, req.context_id)
    elif response.status == processor_pb2.TpProcessResponse.INTERNAL_ERROR:
        # Transient processor failure: retry against the same processor
        # type, waiting for one to register if necessary.
        header = transaction_pb2.TransactionHeader()
        header.ParseFromString(req.header)
        processor_type = processor_iterator.ProcessorType(
            header.family_name,
            header.family_version,
            header.payload_encoding)
        self._execute_or_wait_for_processor_type(processor_type, request,
                                                 req.signature)
    else:
        # Invalid transaction: discard its context and record the failure.
        self._context_manager.delete_contexts(
            context_id_list=[req.context_id])
        self._scheduler.set_transaction_execution_result(
            req.signature, False, req.context_id)
def register_transaction_processor(self, sender, family, version, encoding,
                                   namespaces):
    """Record *sender* as the processor for (family, version, encoding)."""
    key = processor_iterator.ProcessorType(family, version, encoding)
    self._processors[key] = processor_iterator.Processor(sender, namespaces)
def run(self):
    """Drain the scheduler, dispatching each transaction to a registered
    transaction processor over the validator service.

    For every scheduled transaction: parse its header, create an execution
    context scoped to the header's inputs/outputs, serialize a
    TpProcessRequest, and send it to the matching processor. Responses are
    delivered asynchronously to self._future_done_callback.
    """
    for txn_info in self._scheduler:
        txn = txn_info.txn
        # The header is carried serialized on the transaction; parse it
        # for family/version/encoding and declared inputs/outputs.
        header = transaction_pb2.TransactionHeader()
        header.ParseFromString(txn.header)
        # The context restricts what state the processor may read/write.
        context_id = self._context_manager.create_context(
            txn_info.state_hash,
            inputs=list(header.inputs),
            outputs=list(header.outputs))
        content = processor_pb2.TpProcessRequest(
            header=txn.header,
            payload=txn.payload,
            signature=txn.header_signature,
            context_id=context_id).SerializeToString()
        processor_type = processor_iterator.ProcessorType(
            header.family_name,
            header.family_version,
            header.payload_encoding)
        # Currently we only check for the sawtooth_config txn family,
        # as it is the only family we know to require.
        if self._require_txn_processors and \
                header.family_name == 'sawtooth_config' and \
                processor_type not in self._processors:
            # wait until required processor is registered:
            LOGGER.info('Waiting for transaction processor (%s, %s, %s)',
                        header.family_name,
                        header.family_version,
                        header.payload_encoding)
            self._processors.wait_to_process(processor_type)
        # For any other family, a missing processor is a hard error here.
        if processor_type not in self._processors:
            raise Exception("internal error, no processor available")
        processor = self._processors[processor_type]
        identity = processor.identity
        future = self._service.send(
            validator_pb2.Message.TP_PROCESS_REQUEST,
            content,
            identity=identity,
            has_callback=True)
        future.add_callback(self._future_done_callback)
def send_txn(self, header, message):
    """Send *message* to the processor that handles *header*'s type.

    Looks up the processor by (family, version, encoding), stamps the
    message with the processor's sender identity, registers a Future for
    the reply, and hands the message to the send/receive thread.

    Returns the Future tracking the processor's response.
    """
    ptype = processor_iterator.ProcessorType(header.family_name,
                                             header.family_version,
                                             header.payload_encoding)
    if ptype not in self._processors:
        raise Exception("internal error, no processor available")

    message.sender = self._processors[ptype].sender

    reply_future = future.Future(message.correlation_id,
                                 message.content,
                                 has_callback=True)
    self._futures.put(reply_future)
    self._send_receive_thread.send_message(message)
    return reply_future
def execute_thread(self):
    """Main execution loop.

    Pulls transactions off the scheduler; for each one, checks it against
    the on-chain list of required transaction families, creates an
    execution context, and dispatches it to (or waits for) a matching
    transaction processor. Sets self._done when the scheduler is drained.
    """
    for txn_info in self._scheduler:
        txn = txn_info.txn
        header = transaction_pb2.TransactionHeader()
        header.ParseFromString(txn.header)
        processor_type = processor_iterator.ProcessorType(
            header.family_name,
            header.family_version,
            header.payload_encoding)
        # Read the required-families setting at this txn's state root.
        config = self._settings_view_factory.create_settings_view(
            txn_info.state_hash)
        transaction_families = config.get_setting(
            key=self._tp_settings_key,
            default_value="[]")
        # After reading the transaction families required in configuration
        # try to json.loads them into a python object
        # If there is a misconfiguration, proceed as if there is no
        # configuration.
        try:
            transaction_families = json.loads(transaction_families)
            required_transaction_processors = [
                processor_iterator.ProcessorType(
                    d.get('family'),
                    d.get('version'),
                    d.get('encoding'))
                for d in transaction_families]
        except ValueError:
            LOGGER.warning(
                "sawtooth.validator.transaction_families "
                "misconfigured. Expecting a json array, found"
                " %s", transaction_families)
            required_transaction_processors = []
        # First check if the transaction should be failed
        # based on configuration
        if required_transaction_processors and \
                processor_type not in required_transaction_processors:
            # The txn processor type is not in the required
            # transaction processors so
            # failing transaction right away
            LOGGER.debug(
                "failing transaction %s of type (name=%s,"
                "version=%s,encoding=%s) since it isn't"
                " required in the configuration",
                txn.header_signature,
                processor_type.name,
                processor_type.version,
                processor_type.encoding)
            self._scheduler.set_transaction_execution_result(
                txn_signature=txn.header_signature,
                is_valid=False,
                context_id=None)
            continue
        try:
            # Context creation can fail if the scheduler handed us stale
            # base contexts; both failure modes invalidate the txn.
            context_id = self._context_manager.create_context(
                state_hash=txn_info.state_hash,
                base_contexts=txn_info.base_context_ids,
                inputs=list(header.inputs),
                outputs=list(header.outputs))
        except KeyError:
            LOGGER.warning(
                "Error creating context for transaction %s, "
                "scheduler provided a base context that was not "
                "in the context manager.", txn.header_signature)
            self._scheduler.set_transaction_execution_result(
                txn_signature=txn.header_signature,
                is_valid=False,
                context_id=None)
            continue
        except CreateContextException as cce:
            LOGGER.info("Exception creating context: %s", cce)
            self._scheduler.set_transaction_execution_result(
                txn_signature=txn.header_signature,
                is_valid=False,
                context_id=None)
            continue
        content = processor_pb2.TpProcessRequest(
            header=txn.header,
            payload=txn.payload,
            signature=txn.header_signature,
            context_id=context_id).SerializeToString()
        # Since we have already checked if the transaction should be failed
        # all other cases should either be executed or waited for.
        self._execute_or_wait_for_processor_type(
            processor_type=processor_type,
            content=content,
            signature=txn.header_signature)
    # Scheduler iteration ended: signal completion to the owner.
    self._done = True
def _future_done_callback(self, request, result):
    """Handle a processor's TpProcessResponse for an in-flight request.

    :param request (bytes):the serialized request
    :param result (FutureResult): the response envelope from the processor

    OK: collect execution results from the context manager, convert them
    to StateChange receipts and mark the txn valid. INTERNAL_ERROR:
    re-dispatch the request to a processor of the same type. Any other
    status: drop the context, mark the txn invalid and notify observers.
    """
    req = processor_pb2.TpProcessRequest()
    req.ParseFromString(request)
    response = processor_pb2.TpProcessResponse()
    response.ParseFromString(result.content)
    # Guarded delete: the tracked future may already have been removed
    # (e.g. on processor disconnect), so check before deleting.
    if result.connection_id in self._open_futures and \
            req.signature in self._open_futures[result.connection_id]:
        del self._open_futures[result.connection_id][req.signature]
    if response.status == processor_pb2.TpProcessResponse.OK:
        state_sets, state_deletes, events, data = \
            self._context_manager.get_execution_results(req.context_id)
        # Build receipt StateChange entries: SETs carry values, DELETEs
        # only the address.
        state_changes = [
            transaction_receipt_pb2.StateChange(
                address=addr,
                value=value,
                type=transaction_receipt_pb2.StateChange.SET)
            for addr, value in state_sets.items()
        ] + [
            transaction_receipt_pb2.StateChange(
                address=addr,
                type=transaction_receipt_pb2.StateChange.DELETE)
            for addr in state_deletes
        ]
        self._scheduler.set_transaction_execution_result(
            txn_signature=req.signature,
            is_valid=True,
            context_id=req.context_id,
            state_changes=state_changes,
            events=events,
            data=data)
    elif response.status == processor_pb2.TpProcessResponse.INTERNAL_ERROR:
        # Transient error: retry against the same processor type.
        header = transaction_pb2.TransactionHeader()
        header.ParseFromString(req.header)
        processor_type = processor_iterator.ProcessorType(
            header.family_name,
            header.family_version)
        self._execute_or_wait_for_processor_type(processor_type, request,
                                                 req.signature)
    else:
        # Invalid transaction: discard its context, record the failure,
        # and fan out to invalid-transaction observers.
        self._context_manager.delete_contexts(
            context_id_list=[req.context_id])
        self._scheduler.set_transaction_execution_result(
            txn_signature=req.signature,
            is_valid=False,
            context_id=req.context_id,
            error_message=response.message,
            error_data=response.extended_data)
        for observer in self._invalid_observers:
            observer.notify_txn_invalid(req.signature,
                                        response.message,
                                        response.extended_data)
def _execute_schedule(self):
    """Execute every transaction the scheduler yields.

    For each transaction: validate its processor type and output
    namespaces against the configured required transaction families,
    create an execution context, then dispatch the TpProcessRequest to
    (or wait for) a matching processor. Sets self._done at the end.
    """
    LOGGER.debug("execute_thread: ...")
    for txn_info in self._scheduler:
        # Metrics: count every transaction we attempt to execute.
        self._transaction_execution_count.inc()
        txn = txn_info.txn
        header = transaction_pb2.TransactionHeader()
        header.ParseFromString(txn.header)
        LOGGER.debug("_execute_schedule:tnx family_name=%s ",
                     header.family_name)
        processor_type = processor_iterator.ProcessorType(
            header.family_name,
            header.family_version)
        # Read the required-families setting at this txn's state root.
        config = self._settings_view_factory.create_settings_view(
            txn_info.state_hash)
        transaction_families = config.get_setting(
            key=self._tp_settings_key,
            default_value="[]")
        # After reading the transaction families required in configuration
        # try to json.loads them into a python object
        # If there is a misconfiguration, proceed as if there is no
        # configuration.
        try:
            transaction_families = json.loads(transaction_families)
            required_transaction_processors = [
                processor_iterator.ProcessorType(
                    d.get('family'),
                    d.get('version'))
                for d in transaction_families]
        except ValueError:
            LOGGER.warning(
                "sawtooth.validator.transaction_families "
                "misconfigured. Expecting a json array, found"
                " %s", transaction_families)
            required_transaction_processors = []
        # First check if the transaction should be failed
        # based on configuration
        if required_transaction_processors and \
                processor_type not in required_transaction_processors:
            # The txn processor type is not in the required
            # transaction processors so
            # failing transaction right away
            LOGGER.debug(
                "failing transaction %s of type (name=%s,"
                "version=%s) since it isn't"
                " required in the configuration",
                txn.header_signature,
                processor_type.name,
                processor_type.version)
            self._scheduler.set_transaction_execution_result(
                txn_signature=txn.header_signature,
                is_valid=False,
                context_id=None)
            continue
        if processor_type in required_transaction_processors:
            # The txn processor type is in the required
            # transaction processors: check all the outputs of
            # the transaction match one namespace listed
            transaction_family = \
                next(t for t in transaction_families
                     if t.get('family') == header.family_name
                     and t.get('version') == header.family_version)
            # if no namespaces are indicated, then the empty prefix is
            # inserted by default
            namespaces = transaction_family.get('namespaces', [''])
            if not isinstance(namespaces, list):
                LOGGER.warning(
                    "namespaces should be a list for "
                    "transaction family (name=%s, version=%s)",
                    processor_type.name,
                    processor_type.version)
            # Every declared output must start with one of the allowed
            # namespace prefixes.
            prefixes = header.outputs
            bad_prefixes = [
                prefix for prefix in prefixes
                if not any(prefix.startswith(n) for n in namespaces)]
            for prefix in bad_prefixes:
                # log each
                LOGGER.debug(
                    "failing transaction %s of type (name=%s,"
                    "version=%s) because of no namespace listed "
                    "in %s from the configuration settings can "
                    "match the prefix %s",
                    txn.header_signature,
                    processor_type.name,
                    processor_type.version,
                    namespaces,
                    prefix)
            if bad_prefixes:
                self._scheduler.set_transaction_execution_result(
                    txn_signature=txn.header_signature,
                    is_valid=False,
                    context_id=None)
                continue
        try:
            LOGGER.debug('_execute_schedule: STATE=%s\n',
                         txn_info.state_hash[:10])
            # Context creation can fail if the scheduler handed us stale
            # base contexts; both failure modes invalidate the txn.
            context_id = self._context_manager.create_context(
                state_hash=txn_info.state_hash,
                base_contexts=txn_info.base_context_ids,
                inputs=list(header.inputs),
                outputs=list(header.outputs))
        except KeyError:
            LOGGER.warning(
                "Error creating context for transaction %s, scheduler provided a base context that was not in the context manager.",
                txn.header_signature)
            self._scheduler.set_transaction_execution_result(
                txn_signature=txn.header_signature,
                is_valid=False,
                context_id=None)
            continue
        except CreateContextException as cce:
            LOGGER.info("Exception creating context: %s", cce)
            self._scheduler.set_transaction_execution_result(
                txn_signature=txn.header_signature,
                is_valid=False,
                context_id=None)
            continue
        # send request for tnx processor
        LOGGER.debug("Execute schedule:send tnx=%s to tnx processor(%s)",
                     txn.header_signature[:8],
                     txn.payload)
        # NOTE(review): this variant passes the parsed header message, not
        # the serialized txn.header bytes — presumably TpProcessRequest's
        # header field is a message in this proto version; confirm.
        content = processor_pb2.TpProcessRequest(
            header=header,
            payload=txn.payload,
            signature=txn.header_signature,
            context_id=context_id).SerializeToString()
        # Since we have already checked if the transaction should be failed
        # all other cases should either be executed or waited for.
        self._execute_or_wait_for_processor_type(
            processor_type=processor_type,
            content=content,
            signature=txn.header_signature)
    # Scheduler iteration ended: signal completion to the owner.
    self._done = True
def _future_done_callback(self, request, result):
    """Handle a processor's TpProcessResponse for an in-flight request.

    :param request (bytes):the serialized request
    :param result (FutureResult): the response envelope from the processor

    OK: collect execution results and mark the txn valid.
    INTERNAL_ERROR: re-dispatch to a processor of the same type.
    Otherwise: mark invalid — unless self._malicious == 2, a
    fault-injection mode that deliberately reports invalid txns as valid.
    """
    req = processor_pb2.TpProcessRequest()
    req.ParseFromString(request)
    response = processor_pb2.TpProcessResponse()
    response.ParseFromString(result.content)
    # Metrics: count responses per status name.
    self._get_tp_process_response_counter(
        response.Status.Name(response.status)).inc()
    # Guarded delete: the tracked future may already have been removed
    # (e.g. on processor disconnect), so check before deleting.
    if result.connection_id in self._open_futures and \
            req.signature in self._open_futures[result.connection_id]:
        del self._open_futures[result.connection_id][req.signature]
    if response.status == processor_pb2.TpProcessResponse.OK:
        LOGGER.debug("_future_done_callback: processor Response.OK tnx=%s",
                     req.signature[:8])
        state_sets, state_deletes, events, data = \
            self._context_manager.get_execution_results(req.context_id)
        # Build receipt StateChange entries: SETs carry values, DELETEs
        # only the address.
        state_changes = [
            transaction_receipt_pb2.StateChange(
                address=addr,
                value=value,
                type=transaction_receipt_pb2.StateChange.SET)
            for addr, value in state_sets.items()
        ] + [
            transaction_receipt_pb2.StateChange(
                address=addr,
                type=transaction_receipt_pb2.StateChange.DELETE)
            for addr in state_deletes
        ]
        self._scheduler.set_transaction_execution_result(
            txn_signature=req.signature,
            is_valid=True,
            context_id=req.context_id,
            state_changes=state_changes,
            events=events,
            data=data)
        LOGGER.debug(
            "_future_done_callback: processor Response.OK tnx=%s DONE",
            req.signature[:8])
    elif response.status == processor_pb2.TpProcessResponse.INTERNAL_ERROR:
        LOGGER.debug(
            "_future_done_callback: processor Response.INTERNAL_ERROR tnx=%s",
            req.signature[:8])
        # NOTE(review): req.header is used directly as a message here —
        # presumably the header field is a TransactionHeader message in
        # this proto version; confirm against the .proto definition.
        processor_type = processor_iterator.ProcessorType(
            req.header.family_name,
            req.header.family_version)
        self._execute_or_wait_for_processor_type(processor_type, request,
                                                 req.signature)
    else:
        if self._malicious == 2:
            # say that it was correct
            # Fault-injection path: report the invalid txn as valid,
            # with whatever results the context currently holds.
            LOGGER.debug(
                "_future_done_callback: processor Response.MALICIOUS tnx=%s(%s)",
                req.signature[:8],
                response.message)
            state_sets, state_deletes, events, data = \
                self._context_manager.get_execution_results(req.context_id)
            state_changes = [
                transaction_receipt_pb2.StateChange(
                    address=addr,
                    value=value,
                    type=transaction_receipt_pb2.StateChange.SET)
                for addr, value in state_sets.items()
            ] + [
                transaction_receipt_pb2.StateChange(
                    address=addr,
                    type=transaction_receipt_pb2.StateChange.DELETE)
                for addr in state_deletes
            ]
            self._scheduler.set_transaction_execution_result(
                txn_signature=req.signature,
                is_valid=True,
                context_id=req.context_id,
                state_changes=state_changes,
                events=events,
                data=data)
            return
        # Normal invalid path: discard the context, record the failure,
        # and fan out to invalid-transaction observers.
        LOGGER.debug(
            "_future_done_callback: processor mal=%s Response.INVALID tnx=%s(%s)",
            self._malicious,
            req.signature[:8],
            response.message)
        self._context_manager.delete_contexts(
            context_id_list=[req.context_id])
        self._scheduler.set_transaction_execution_result(
            txn_signature=req.signature,
            is_valid=False,
            context_id=req.context_id,
            error_message=response.message,
            error_data=response.extended_data)
        for observer in self._invalid_observers:
            observer.notify_txn_invalid(req.signature,
                                        response.message,
                                        response.extended_data)