def _future_done_callback(self, request, result):
    """
    :param request (bytes): the serialized request
    :param result (FutureResult):
    """
    req = processor_pb2.TpProcessRequest()
    req.ParseFromString(request)

    response = processor_pb2.TpProcessResponse()
    response.ParseFromString(result.content)

    del self._open_futures[result.connection_id][req.signature]

    if response.status == processor_pb2.TpProcessResponse.OK:
        self._scheduler.set_transaction_execution_result(
            req.signature, True, req.context_id)
    elif response.status == processor_pb2.TpProcessResponse.INTERNAL_ERROR:
        header = transaction_pb2.TransactionHeader()
        header.ParseFromString(req.header)

        processor_type = processor_iterator.ProcessorType(
            header.family_name,
            header.family_version,
            header.payload_encoding)

        self._execute_or_wait_for_processor_type(
            processor_type, request, req.signature)
    else:
        self._context_manager.delete_contexts(
            context_id_list=[req.context_id])
        self._scheduler.set_transaction_execution_result(
            req.signature, False, req.context_id)
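# The callback above (and several functions below) builds a
# processor_iterator.ProcessorType key and uses it to look up registered
# transaction processors. A minimal sketch of such a key, assuming it is a
# hashable tuple of family name, family version and (in the older variants)
# payload encoding; the real class in processor_iterator may differ in
# detail, and the example values are hypothetical.
from collections import namedtuple

ProcessorType = namedtuple('ProcessorType', ['name', 'version', 'encoding'])

# Two keys built from the same header fields compare equal, so membership
# tests like `processor_type not in self._processors` and dict lookups like
# `self._processors[processor_type]` behave as expected.
assert ProcessorType('intkey', '1.0', 'application/cbor') == \
    ProcessorType('intkey', '1.0', 'application/cbor')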
def _future_done_callback(self, request, result):
    """
    :param request (bytes): the serialized request
    :param result (FutureResult):
    """
    req = processor_pb2.TpProcessRequest()
    req.ParseFromString(request)

    response = processor_pb2.TpProcessResponse()
    response.ParseFromString(result.content)

    if response.status == processor_pb2.TpProcessResponse.OK:
        self._scheduler.set_status(req.signature, True, req.context_id)
    else:
        self._context_manager.delete_context(
            context_id_list=[req.context_id])
        self._scheduler.set_status(req.signature, False, req.context_id)
def run(self):
    for txn_info in self._scheduler:
        txn = txn_info.txn
        header = transaction_pb2.TransactionHeader()
        header.ParseFromString(txn.header)

        context_id = self._context_manager.create_context(
            txn_info.state_hash,
            inputs=list(header.inputs),
            outputs=list(header.outputs))

        content = processor_pb2.TpProcessRequest(
            header=txn.header,
            payload=txn.payload,
            signature=txn.header_signature,
            context_id=context_id).SerializeToString()

        processor_type = processor_iterator.ProcessorType(
            header.family_name,
            header.family_version,
            header.payload_encoding)

        # Currently we only check for the sawtooth_config txn family,
        # as it is the only family we know to require.
        if self._require_txn_processors and \
                header.family_name == 'sawtooth_config' and \
                processor_type not in self._processors:
            # Wait until the required processor is registered.
            LOGGER.info('Waiting for transaction processor (%s, %s, %s)',
                        header.family_name,
                        header.family_version,
                        header.payload_encoding)
            self._processors.wait_to_process(processor_type)

        if processor_type not in self._processors:
            raise Exception("internal error, no processor available")

        processor = self._processors[processor_type]
        identity = processor.identity

        future = self._service.send(
            validator_pb2.Message.TP_PROCESS_REQUEST,
            content,
            identity=identity,
            has_callback=True)
        future.add_callback(self._future_done_callback)
def run(self):
    for txn_info in self._scheduler:
        txn = txn_info.txn
        header = transaction_pb2.TransactionHeader()
        header.ParseFromString(txn.header)

        context_id = self._context_manager.create_context(
            txn_info.state_hash,
            inputs=list(header.inputs),
            outputs=list(header.outputs))

        content = processor_pb2.TpProcessRequest(
            header=txn.header,
            payload=txn.payload,
            signature=txn.header_signature,
            context_id=context_id).SerializeToString()

        message = validator_pb2.Message(
            message_type=validator_pb2.Message.TP_PROCESS_REQUEST,
            correlation_id=_generate_id(),
            content=content)

        future = self._service.send_txn(header=header, message=message)
        future.add_callback(self._future_done_callback)
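# Both run() variants above attach self._future_done_callback to the future
# returned by the send call, and the callbacks in this module expect to be
# invoked as callback(request, result), where `request` is the serialized
# TpProcessRequest and `result` carries the serialized TpProcessResponse in
# result.content (plus, in later variants, result.connection_id). A minimal
# sketch of that contract, assuming simplified Future/FutureResult classes;
# the validator's real future implementation differs in detail.
class FutureResult:
    def __init__(self, content, connection_id=None):
        self.content = content              # serialized TpProcessResponse bytes
        self.connection_id = connection_id  # id of the responding processor connection


class Future:
    def __init__(self, request):
        self._request = request             # serialized TpProcessRequest bytes
        self._callback = None

    def add_callback(self, callback):
        self._callback = callback

    def set_result(self, result):
        # Called when the processor's response arrives; hands the original
        # request bytes and the FutureResult to the registered callback.
        if self._callback is not None:
            self._callback(self._request, result)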
def _future_done_callback(self, request, result):
    """
    :param request (bytes): the serialized request
    :param result (FutureResult):
    """
    self._in_process_transactions_count.dec()

    req = processor_pb2.TpProcessRequest()
    req.ParseFromString(request)

    response = processor_pb2.TpProcessResponse()
    response.ParseFromString(result.content)

    processor_type = ProcessorType(
        req.header.family_name,
        req.header.family_version)
    self._processor_manager[processor_type].get_processor(
        result.connection_id).dec_occupancy()
    self._processor_manager.notify()

    self._get_tp_process_response_counter(
        response.Status.Name(response.status)).inc()

    if result.connection_id in self._open_futures and \
            req.signature in self._open_futures[result.connection_id]:
        del self._open_futures[result.connection_id][req.signature]

    if response.status == processor_pb2.TpProcessResponse.OK:
        state_sets, state_deletes, events, data = \
            self._context_manager.get_execution_results(req.context_id)

        state_changes = [
            transaction_receipt_pb2.StateChange(
                address=addr,
                value=value,
                type=transaction_receipt_pb2.StateChange.SET)
            for addr, value in state_sets.items()
        ] + [
            transaction_receipt_pb2.StateChange(
                address=addr,
                type=transaction_receipt_pb2.StateChange.DELETE)
            for addr in state_deletes
        ]

        self._scheduler.set_transaction_execution_result(
            txn_signature=req.signature,
            is_valid=True,
            context_id=req.context_id,
            state_changes=state_changes,
            events=events,
            data=data)

    elif response.status == processor_pb2.TpProcessResponse.INTERNAL_ERROR:
        LOGGER.error(
            "Transaction processor internal error: %s "
            "(transaction: %s, name: %s, version: %s)",
            response.message,
            req.signature,
            req.header.family_name,
            req.header.family_version)

        # Make sure that the transaction wasn't unscheduled in the interim
        if self._scheduler.is_transaction_in_schedule(req.signature):
            self._execute(
                processor_type=processor_type,
                content=request,
                signature=req.signature)

    else:
        self._context_manager.delete_contexts(
            context_id_list=[req.context_id])
        self._fail_transaction(
            txn_signature=req.signature,
            context_id=req.context_id,
            error_message=response.message,
            error_data=response.extended_data)
def _execute_schedule(self):
    for txn_info in self._scheduler:
        self._transaction_execution_count.inc()

        txn = txn_info.txn
        header = transaction_pb2.TransactionHeader()
        header.ParseFromString(txn.header)

        processor_type = ProcessorType(
            header.family_name,
            header.family_version)

        config = self._settings_view_factory.create_settings_view(
            txn_info.state_hash)

        transaction_families = config.get_setting(
            key=self._tp_settings_key,
            default_value="[]")

        # After reading the transaction families required in the
        # configuration, try to json.loads them into a python object.
        # If there is a misconfiguration, proceed as if there is no
        # configuration.
        try:
            transaction_families = json.loads(transaction_families)
            required_transaction_processors = [
                ProcessorType(
                    d.get('family'),
                    d.get('version'))
                for d in transaction_families]
        except ValueError:
            LOGGER.error("sawtooth.validator.transaction_families "
                         "misconfigured. Expecting a json array, found"
                         " %s", transaction_families)
            required_transaction_processors = []

        # First check if the transaction should be failed
        # based on configuration.
        if required_transaction_processors and \
                processor_type not in required_transaction_processors:
            # The txn processor type is not in the required
            # transaction processors, so fail the transaction
            # right away.
            LOGGER.debug("failing transaction %s of type (name=%s,"
                         "version=%s) since it isn't"
                         " required in the configuration",
                         txn.header_signature,
                         processor_type.name,
                         processor_type.version)
            self._fail_transaction(txn.header_signature)
            continue

        if processor_type in required_transaction_processors:
            # The txn processor type is in the required
            # transaction processors: check that all the outputs of
            # the transaction match one of the listed namespaces.
            transaction_family = \
                next(t for t in transaction_families
                     if t.get('family') == header.family_name
                     and t.get('version') == header.family_version)

            # If no namespaces are indicated, the empty prefix is
            # inserted by default.
            namespaces = transaction_family.get('namespaces', [''])
            if not isinstance(namespaces, list):
                LOGGER.error("namespaces should be a list for "
                             "transaction family (name=%s, version=%s)",
                             processor_type.name,
                             processor_type.version)

            prefixes = header.outputs
            bad_prefixes = [
                prefix for prefix in prefixes
                if not any(prefix.startswith(n) for n in namespaces)
            ]
            for prefix in bad_prefixes:
                # log each bad prefix
                LOGGER.debug("failing transaction %s of type (name=%s,"
                             "version=%s) because no namespace listed "
                             "in %s from the configuration settings can "
                             "match the prefix %s",
                             txn.header_signature,
                             processor_type.name,
                             processor_type.version,
                             namespaces,
                             prefix)
            if bad_prefixes:
                self._fail_transaction(txn.header_signature)
                continue

        try:
            context_id = self._context_manager.create_context(
                state_hash=txn_info.state_hash,
                base_contexts=txn_info.base_context_ids,
                inputs=list(header.inputs),
                outputs=list(header.outputs))
        except KeyError:
            LOGGER.error(
                "Error creating context for transaction %s, "
                "scheduler provided a base context that was not "
                "in the context manager.", txn.header_signature)
            self._scheduler.set_transaction_execution_result(
                txn_signature=txn.header_signature,
                is_valid=False,
                context_id=None)
            continue
        except CreateContextException:
            LOGGER.exception("Exception creating context")
            self._scheduler.set_transaction_execution_result(
                txn_signature=txn.header_signature,
                is_valid=False,
                context_id=None)
            continue

        content = processor_pb2.TpProcessRequest(
            header=header,
            payload=txn.payload,
            signature=txn.header_signature,
            context_id=context_id).SerializeToString()

        # Since we have already checked whether the transaction should be
        # failed, all other cases should either be executed or waited for.
        self._execute(
            processor_type=processor_type,
            content=content,
            signature=txn.header_signature)

    self._done = True
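# The schedule loop above reads the sawtooth.validator.transaction_families
# setting and json.loads it into a list of dicts keyed by 'family',
# 'version' and (optionally) 'namespaces'. A hedged example of a value that
# parses cleanly; the family names and namespace prefix are hypothetical.
import json

example_setting = json.dumps([
    {"family": "intkey", "version": "1.0", "namespaces": ["1cf126"]},
    {"family": "xo", "version": "1.0"},  # no namespaces -> defaults to ['']
])

required = [
    (d.get('family'), d.get('version'))
    for d in json.loads(example_setting)
]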
def execute_thread(self):
    for txn_info in self._scheduler:
        txn = txn_info.txn
        header = transaction_pb2.TransactionHeader()
        header.ParseFromString(txn.header)

        processor_type = processor_iterator.ProcessorType(
            header.family_name,
            header.family_version,
            header.payload_encoding)

        config = self._settings_view_factory.create_settings_view(
            txn_info.state_hash)
        transaction_families = config.get_setting(
            key=self._tp_settings_key,
            default_value="[]")

        # After reading the transaction families required in the
        # configuration, try to json.loads them into a python object.
        # If there is a misconfiguration, proceed as if there is no
        # configuration.
        try:
            transaction_families = json.loads(transaction_families)
            required_transaction_processors = [
                processor_iterator.ProcessorType(
                    d.get('family'),
                    d.get('version'),
                    d.get('encoding'))
                for d in transaction_families
            ]
        except ValueError:
            LOGGER.warning(
                "sawtooth.validator.transaction_families "
                "misconfigured. Expecting a json array, found"
                " %s", transaction_families)
            required_transaction_processors = []

        # First check if the transaction should be failed
        # based on configuration.
        if required_transaction_processors and \
                processor_type not in required_transaction_processors:
            # The txn processor type is not in the required
            # transaction processors, so fail the transaction
            # right away.
            LOGGER.debug(
                "failing transaction %s of type (name=%s,"
                "version=%s,encoding=%s) since it isn't"
                " required in the configuration",
                txn.header_signature,
                processor_type.name,
                processor_type.version,
                processor_type.encoding)
            self._scheduler.set_transaction_execution_result(
                txn_signature=txn.header_signature,
                is_valid=False,
                context_id=None)
            continue

        try:
            context_id = self._context_manager.create_context(
                state_hash=txn_info.state_hash,
                base_contexts=txn_info.base_context_ids,
                inputs=list(header.inputs),
                outputs=list(header.outputs))
        except KeyError:
            LOGGER.warning(
                "Error creating context for transaction %s, "
                "scheduler provided a base context that was not "
                "in the context manager.", txn.header_signature)
            self._scheduler.set_transaction_execution_result(
                txn_signature=txn.header_signature,
                is_valid=False,
                context_id=None)
            continue
        except CreateContextException as cce:
            LOGGER.info("Exception creating context: %s", cce)
            self._scheduler.set_transaction_execution_result(
                txn_signature=txn.header_signature,
                is_valid=False,
                context_id=None)
            continue

        content = processor_pb2.TpProcessRequest(
            header=txn.header,
            payload=txn.payload,
            signature=txn.header_signature,
            context_id=context_id).SerializeToString()

        # Since we have already checked whether the transaction should be
        # failed, all other cases should either be executed or waited for.
        self._execute_or_wait_for_processor_type(
            processor_type=processor_type,
            content=content,
            signature=txn.header_signature)

    self._done = True
def _future_done_callback(self, request, result):
    """
    :param request (bytes): the serialized request
    :param result (FutureResult):
    """
    req = processor_pb2.TpProcessRequest()
    req.ParseFromString(request)

    response = processor_pb2.TpProcessResponse()
    response.ParseFromString(result.content)

    if result.connection_id in self._open_futures and \
            req.signature in self._open_futures[result.connection_id]:
        del self._open_futures[result.connection_id][req.signature]

    if response.status == processor_pb2.TpProcessResponse.OK:
        state_sets, state_deletes, events, data = \
            self._context_manager.get_execution_results(req.context_id)

        state_changes = [
            transaction_receipt_pb2.StateChange(
                address=addr,
                value=value,
                type=transaction_receipt_pb2.StateChange.SET)
            for addr, value in state_sets.items()
        ] + [
            transaction_receipt_pb2.StateChange(
                address=addr,
                type=transaction_receipt_pb2.StateChange.DELETE)
            for addr in state_deletes
        ]

        self._scheduler.set_transaction_execution_result(
            txn_signature=req.signature,
            is_valid=True,
            context_id=req.context_id,
            state_changes=state_changes,
            events=events,
            data=data)
    elif response.status == processor_pb2.TpProcessResponse.INTERNAL_ERROR:
        header = transaction_pb2.TransactionHeader()
        header.ParseFromString(req.header)

        processor_type = processor_iterator.ProcessorType(
            header.family_name,
            header.family_version)

        self._execute_or_wait_for_processor_type(
            processor_type, request, req.signature)
    else:
        self._context_manager.delete_contexts(
            context_id_list=[req.context_id])

        self._scheduler.set_transaction_execution_result(
            txn_signature=req.signature,
            is_valid=False,
            context_id=req.context_id,
            error_message=response.message,
            error_data=response.extended_data)

        for observer in self._invalid_observers:
            observer.notify_txn_invalid(req.signature,
                                        response.message,
                                        response.extended_data)
def _future_done_callback(self, request, result):
    """
    :param request (bytes): the serialized request
    :param result (FutureResult):
    """
    req = processor_pb2.TpProcessRequest()
    req.ParseFromString(request)

    response = processor_pb2.TpProcessResponse()
    response.ParseFromString(result.content)

    self._get_tp_process_response_counter(
        response.Status.Name(response.status)).inc()

    if result.connection_id in self._open_futures and \
            req.signature in self._open_futures[result.connection_id]:
        del self._open_futures[result.connection_id][req.signature]

    if response.status == processor_pb2.TpProcessResponse.OK:
        LOGGER.debug("_future_done_callback: processor Response.OK tnx=%s",
                     req.signature[:8])
        state_sets, state_deletes, events, data = \
            self._context_manager.get_execution_results(req.context_id)

        state_changes = [
            transaction_receipt_pb2.StateChange(
                address=addr,
                value=value,
                type=transaction_receipt_pb2.StateChange.SET)
            for addr, value in state_sets.items()
        ] + [
            transaction_receipt_pb2.StateChange(
                address=addr,
                type=transaction_receipt_pb2.StateChange.DELETE)
            for addr in state_deletes
        ]

        # LOGGER.debug("_future_done_callback: "
        #              "set_transaction_execution_result changes=%s",
        #              state_changes)
        self._scheduler.set_transaction_execution_result(
            txn_signature=req.signature,
            is_valid=True,
            context_id=req.context_id,
            state_changes=state_changes,
            events=events,
            data=data)
        LOGGER.debug(
            "_future_done_callback: processor Response.OK tnx=%s DONE",
            req.signature[:8])
    elif response.status == processor_pb2.TpProcessResponse.INTERNAL_ERROR:
        LOGGER.debug(
            "_future_done_callback: processor Response.INTERNAL_ERROR "
            "tnx=%s", req.signature[:8])
        processor_type = processor_iterator.ProcessorType(
            req.header.family_name,
            req.header.family_version)

        self._execute_or_wait_for_processor_type(
            processor_type, request, req.signature)
    else:
        if self._malicious == 2:
            # say that it was correct
            LOGGER.debug(
                "_future_done_callback: processor Response.MALICIOUS "
                "tnx=%s(%s)", req.signature[:8], response.message)
            state_sets, state_deletes, events, data = \
                self._context_manager.get_execution_results(req.context_id)

            state_changes = [
                transaction_receipt_pb2.StateChange(
                    address=addr,
                    value=value,
                    type=transaction_receipt_pb2.StateChange.SET)
                for addr, value in state_sets.items()
            ] + [
                transaction_receipt_pb2.StateChange(
                    address=addr,
                    type=transaction_receipt_pb2.StateChange.DELETE)
                for addr in state_deletes
            ]
            self._scheduler.set_transaction_execution_result(
                txn_signature=req.signature,
                is_valid=True,
                context_id=req.context_id,
                state_changes=state_changes,
                events=events,
                data=data)
            return

        LOGGER.debug(
            "_future_done_callback: processor mal=%s Response.INVALID "
            "tnx=%s(%s)", self._malicious, req.signature[:8],
            response.message)
        self._context_manager.delete_contexts(
            context_id_list=[req.context_id])

        self._scheduler.set_transaction_execution_result(
            txn_signature=req.signature,
            is_valid=False,
            context_id=req.context_id,
            error_message=response.message,
            error_data=response.extended_data)

        for observer in self._invalid_observers:
            observer.notify_txn_invalid(req.signature,
                                        response.message,
                                        response.extended_data)
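# The invalid-transaction observers notified above only need to expose a
# notify_txn_invalid(txn_signature, message, extended_data) hook. A minimal
# sketch of such an observer (hypothetical class name; real observers
# registered with the executor may persist receipts or forward events):
import logging


class LoggingInvalidTransactionObserver:
    def notify_txn_invalid(self, txn_signature, message=None,
                           extended_data=None):
        logging.getLogger(__name__).warning(
            "invalid transaction %s: %s", txn_signature, message)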