async def send(conn, batch_list, timeout, webhook=False):
    """Send batch_list to sawtooth."""
    batch_request = client_batch_submit_pb2.ClientBatchSubmitRequest()
    batch_request.batches.extend(list(batch_list.batches))
    validator_response = await conn.send(
        validator_pb2.Message.CLIENT_BATCH_SUBMIT_REQUEST,
        batch_request.SerializeToString(),
        timeout,
    )
    client_response = client_batch_submit_pb2.ClientBatchSubmitResponse()
    client_response.ParseFromString(validator_response.content)
    status = client_response.status
    if not webhook:
        if status == client_batch_submit_pb2.ClientBatchSubmitResponse.INTERNAL_ERROR:
            raise ApiInternalError("Internal Error")
        elif status == client_batch_submit_pb2.ClientBatchSubmitResponse.INVALID_BATCH:
            raise ApiBadRequest("Invalid Batch")
        elif status == client_batch_submit_pb2.ClientBatchSubmitResponse.QUEUE_FULL:
            raise ApiInternalError("Queue Full")
    elif status != client_batch_submit_pb2.ClientBatchSubmitResponse.OK:
        return None

    status_request = client_batch_submit_pb2.ClientBatchStatusRequest()
    status_request.batch_ids.extend(
        list(b.header_signature for b in batch_list.batches))
    status_request.wait = True
    status_request.timeout = timeout
    validator_response = await conn.send(
        validator_pb2.Message.CLIENT_BATCH_STATUS_REQUEST,
        status_request.SerializeToString(),
        timeout,
    )
    status_response = client_batch_submit_pb2.ClientBatchStatusResponse()
    status_response.ParseFromString(validator_response.content)
    status = status_response.status
    if not webhook:
        if status != client_batch_submit_pb2.ClientBatchStatusResponse.OK:
            raise ApiInternalError("Internal Error")
    elif status != client_batch_submit_pb2.ClientBatchStatusResponse.OK:
        return None

    response = status_response.batch_statuses[0]
    status = response.status
    if not webhook:
        if status == client_batch_submit_pb2.ClientBatchStatus.INVALID:
            raise ApiBadRequest("Bad Request: {}".format(
                response.invalid_transactions[0].message))
        elif status == client_batch_submit_pb2.ClientBatchStatus.PENDING:
            raise ApiInternalError("Internal Error: Transaction timed out.")
        elif status == client_batch_submit_pb2.ClientBatchStatus.UNKNOWN:
            raise ApiInternalError("Internal Error: Unspecified error.")
    return status

async def send(conn, batch_list, timeout):
    batch_request = client_batch_submit_pb2.ClientBatchSubmitRequest()
    batch_request.batches.extend(list(batch_list.batches))
    validator_response = await conn.send(
        validator_pb2.Message.CLIENT_BATCH_SUBMIT_REQUEST,
        batch_request.SerializeToString(),
        timeout,
    )
    client_response = client_batch_submit_pb2.ClientBatchSubmitResponse()
    client_response.ParseFromString(validator_response.content)
    if (client_response.status ==
            client_batch_submit_pb2.ClientBatchSubmitResponse.INTERNAL_ERROR):
        raise ApiInternalError("Internal Error")
    elif (client_response.status ==
            client_batch_submit_pb2.ClientBatchSubmitResponse.INVALID_BATCH):
        raise ApiBadRequest("Invalid Batch")
    elif (client_response.status ==
            client_batch_submit_pb2.ClientBatchSubmitResponse.QUEUE_FULL):
        raise ApiInternalError("Queue Full")

    status_request = client_batch_submit_pb2.ClientBatchStatusRequest()
    status_request.batch_ids.extend(
        list(b.header_signature for b in batch_list.batches))
    status_request.wait = True
    status_request.timeout = timeout
    validator_response = await conn.send(
        validator_pb2.Message.CLIENT_BATCH_STATUS_REQUEST,
        status_request.SerializeToString(),
        timeout,
    )
    status_response = client_batch_submit_pb2.ClientBatchStatusResponse()
    status_response.ParseFromString(validator_response.content)
    if status_response.status != client_batch_submit_pb2.ClientBatchStatusResponse.OK:
        raise ApiInternalError("Internal Error")

    resp = status_response.batch_statuses[0]
    if resp.status == client_batch_submit_pb2.ClientBatchStatus.COMMITTED:
        return resp
    elif resp.status == client_batch_submit_pb2.ClientBatchStatus.INVALID:
        raise ApiBadRequest("Bad Request: {}".format(
            resp.invalid_transactions[0].message))
    elif resp.status == client_batch_submit_pb2.ClientBatchStatus.PENDING:
        raise ApiInternalError(
            "Internal Error: Transaction submitted but timed out.")
    elif resp.status == client_batch_submit_pb2.ClientBatchStatus.UNKNOWN:
        raise ApiInternalError(
            "Internal Error: Something went wrong. Try again later.")

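For orientation, a minimal usage sketch of the send() coroutines above (not taken from either original project): it assumes conn is the awaitable validator connection those functions expect and that batch_list is an already-signed BatchList protobuf; building and signing batches is out of scope here, and the 4-second timeout is an arbitrary placeholder.

async def submit_and_wait(conn, batch_list):
    # Hypothetical wrapper: maps the exception contract of send() to a plain dict.
    try:
        # Resolves once the validator reports a final status for the batch,
        # or raises ApiBadRequest / ApiInternalError as shown above.
        result = await send(conn, batch_list, timeout=4)
    except ApiBadRequest as err:
        return {"ok": False, "error": str(err)}   # batch rejected as invalid
    except ApiInternalError as err:
        return {"ok": False, "error": str(err)}   # queue full, timeout, or unknown
    return {"ok": True, "result": result}
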
def watch_batch(self, batch_id):
    # Set up a connection to the validator
    ctx = zmq.Context()
    socket = ctx.socket(zmq.DEALER)
    socket.connect(current_app.config['SAWTOOTH_VALIDATOR_URL'])

    # Construct the request
    request = client_batch_submit_pb2.ClientBatchStatusRequest(
        batch_ids=[batch_id], wait=True).SerializeToString()

    # Construct the message wrapper
    correlation_id = batch_id + uuid.uuid4().hex  # Must be unique for all in-process requests
    msg = Message(correlation_id=correlation_id,
                  message_type=Message.CLIENT_BATCH_STATUS_REQUEST,
                  content=request)

    # Send the request
    socket.send_multipart([msg.SerializeToString()])

    # Receive the response
    resp = socket.recv_multipart()[-1]

    # Parse the message wrapper
    msg = Message()
    msg.ParseFromString(resp)

    # Validate the response type
    if msg.message_type != Message.CLIENT_BATCH_STATUS_RESPONSE:
        current_app.logger.error("Unexpected response message type")
        return

    # Parse the response
    response = client_batch_submit_pb2.ClientBatchStatusResponse()
    response.ParseFromString(msg.content)

    # Validate the response status (note: compare against the status-response
    # enum, not ClientBatchSubmitResponse)
    if response.status != client_batch_submit_pb2.ClientBatchStatusResponse.OK:
        current_app.logger.error("watch batch status failed: {}".format(
            response.response_message))
        return

    # Close the connection to the validator
    socket.close()

    return client_batch_submit_pb2.ClientBatchStatus.Status.Name(
        response.batch_statuses[0].status)

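A hedged sketch of how watch_batch() might be wired into the Flask app it evidently belongs to: app, the route path, and sawtooth_client (an instance of whatever class defines watch_batch) are assumptions, not names from the original project.

from flask import jsonify

@app.route('/batches/<batch_id>/status')
def batch_status(batch_id):
    # watch_batch() returns the enum name ('COMMITTED', 'INVALID', 'PENDING',
    # 'UNKNOWN') or None if the validator response could not be interpreted.
    status = sawtooth_client.watch_batch(batch_id)
    if status is None:
        return jsonify({'error': 'unexpected validator response'}), 502
    return jsonify({'batch_id': batch_id, 'status': status})
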
async def check_batch_status(conn, batch_ids):
    status_request = client_batch_submit_pb2.ClientBatchStatusRequest(
        batch_ids=batch_ids, wait=True)
    validator_response = await conn.send(
        validator_pb2.Message.CLIENT_BATCH_STATUS_REQUEST,
        status_request.SerializeToString())

    status_response = client_batch_submit_pb2.ClientBatchStatusResponse()
    status_response.ParseFromString(validator_response.content)
    batch_status = status_response.batch_statuses[0].status
    if batch_status == client_batch_submit_pb2.ClientBatchStatus.INVALID:
        invalid = status_response.batch_statuses[0].invalid_transactions[0]
        raise ApiBadRequest(invalid.message)
    elif batch_status == client_batch_submit_pb2.ClientBatchStatus.PENDING:
        raise ApiInternalError("Transaction submitted but timed out")
    elif batch_status == client_batch_submit_pb2.ClientBatchStatus.UNKNOWN:
        raise ApiInternalError("Something went wrong. Try again later")

def get_batch(self, batch_id):
    self._stream.wait_for_ready()
    future = self._stream.send(
        message_type=Message.CLIENT_BATCH_STATUS_REQUEST,
        content=client_batch_submit_pb2.ClientBatchStatusRequest(
            batch_ids=[batch_id],
        ).SerializeToString())
    try:
        resp = future.result(ZMQ_CONNECTION_TIMEOUT).content
    except ValidatorConnectionError as vce:
        LOGGER.error('ZMQ error: %s' % vce)
        raise Exception('Failed with ZMQ interaction: {0}'.format(vce))
    except (asyncio.TimeoutError, FutureTimeoutError):
        LOGGER.error(f'Task with batch_id {batch_id} timed out')
        raise Exception('Timeout')

    batch_resp = client_batch_submit_pb2.ClientBatchStatusResponse()
    batch_resp.ParseFromString(resp)
    LOGGER.debug(f'Batch: {resp}')
    batch_resp_str = repr(batch_resp).replace("\n", "")
    LOGGER.debug(f'Batch parsed: {batch_resp_str}')
    hash_sum = hashlib.sha256(batch_resp.SerializeToString()).hexdigest()
    LOGGER.debug(f'got hashsum: {hash_sum}')
    data = message_to_dict(batch_resp)
    LOGGER.debug(f'data: {data}')
    try:
        batch_data = data['batch_statuses'][0]
    except IndexError:
        raise Exception(f'Batch with id "{batch_id}" not found')
    assert batch_id == batch_data['batch_id'], \
        f'Batches not matched (req: {batch_id}, ' \
        f'got: {batch_data["batch_id"]})'
    prep_resp = {'batch_statuses': batch_data}
    return prep_resp, hash_sum

def get_batch(self, batch_id):
    self._stream.wait_for_ready()
    future = self._stream.send(
        message_type=Message.CLIENT_BATCH_STATUS_REQUEST,
        content=client_batch_submit_pb2.ClientBatchStatusRequest(
            batch_ids=[batch_id],
        ).SerializeToString())
    try:
        resp = future.result(10).content
    except ValidatorConnectionError as vce:
        LOGGER.error('ZMQ error: %s' % vce)
        raise Exception('Failed with ZMQ interaction: {0}'.format(vce))
    except asyncio.TimeoutError:
        LOGGER.error(f'Task with batch_id {batch_id} timed out')
        raise Exception('Timeout')

    batch_resp = client_batch_submit_pb2.ClientBatchStatusResponse()
    batch_resp.ParseFromString(resp)
    LOGGER.debug('Batch: %s', resp)
    LOGGER.info('Batch parsed: %s', batch_resp)
    hash_sum = hashlib.sha256(batch_resp.SerializeToString()).hexdigest()
    data = message_to_dict(batch_resp)
    LOGGER.debug('data: %s', data)
    try:
        batch_data = data['batch_statuses'][0]
    except IndexError:
        raise Exception(f'Batch with id "{batch_id}" not found')
    assert batch_id == batch_data['batch_id'], \
        'Batches not matched (req: {0}, got: {1})'.format(
            batch_id, batch_data['batch_id'])
    prep_resp = {'batch_statuses': batch_data}
    return prep_resp, hash_sum
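Both get_batch() variants assume a class holding a Sawtooth SDK Stream as self._stream, a module-level LOGGER, a ZMQ timeout constant, a message_to_dict helper, and the ValidatorConnectionError / FutureTimeoutError exceptions (presumably from sawtooth_sdk.messaging). A hedged sketch of that wiring; the class name, validator URL, timeout value, and helper implementation below are assumptions, not the original project's code.

import logging

from google.protobuf.json_format import MessageToDict
from sawtooth_sdk.messaging.stream import Stream

LOGGER = logging.getLogger(__name__)
ZMQ_CONNECTION_TIMEOUT = 10  # seconds; placeholder value

def message_to_dict(message):
    # One plausible implementation of the helper the methods call:
    # protobuf message -> dict, keeping snake_case field names.
    return MessageToDict(message, preserving_proto_field_name=True)

class BatchClient:
    """Hypothetical wrapper exposing get_batch() as defined above."""

    def __init__(self, validator_url='tcp://localhost:4004'):
        self._stream = Stream(validator_url)

    # Bind one of the get_batch implementations shown above as a method.
    get_batch = get_batch

# Returns ({'batch_statuses': {...}}, '<sha256 of the raw response>') or
# raises on timeout / unknown batch id.
statuses, hash_sum = BatchClient().get_batch('<128-char hex batch header signature>')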