async def list_statuses(self, request):
    """Fetches the committed status of batches by either a POST or GET.

    Request:
        body: A JSON array of one or more id strings (if POST)
        query:
            - id: A comma separated list of up to 15 ids (if GET)
            - wait: Request should not return until all batches committed

    Response:
        data: A JSON object, with batch ids as keys, and statuses as values
        link: The /batch_statuses link queried (if GET)
    """
    error_traps = [error_handlers.StatusResponseMissing]
    is_post = request.method == 'POST'

    # Collect batch ids: JSON body for POST, query parameters for GET
    if is_post:
        content_type = request.headers['Content-Type']
        if content_type != 'application/json':
            LOGGER.debug('Request headers had wrong Content-Type: %s',
                         content_type)
            raise errors.StatusWrongContentType()

        batch_ids = await request.json()
        body_ok = (batch_ids
                   and isinstance(batch_ids, list)
                   and all(isinstance(b, str) for b in batch_ids))
        if not body_ok:
            LOGGER.debug('Request body was invalid: %s', batch_ids)
            raise errors.StatusBodyInvalid()
        for batch_id in batch_ids:
            self._validate_id(batch_id)
    else:
        batch_ids = self._get_filter_ids(request)
        if not batch_ids:
            LOGGER.debug('Request for statuses missing id query')
            raise errors.StatusIdQueryInvalid()

    # Ask the validator for the status of every requested batch
    validator_query = client_batch_submit_pb2.ClientBatchStatusRequest(
        batch_ids=batch_ids)
    self._set_wait(request, validator_query)

    response = await self._query_validator(
        Message.CLIENT_BATCH_STATUS_REQUEST,
        client_batch_submit_pb2.ClientBatchStatusResponse,
        validator_query,
        error_traps)

    # Only GET responses carry a metadata link
    metadata = None if is_post else self._get_metadata(request, response)
    data = self._drop_id_prefixes(
        self._drop_empty_props(response['batch_statuses']))

    return self._wrap_response(request, data=data, metadata=metadata)
async def send(conn, batch_list, timeout, webhook=False):
    """Send batch_list to sawtooth.

    Submits the batches, then polls their status (wait=True) and returns
    the status of the first batch.  When ``webhook`` is True, failures
    return None instead of raising API errors.
    """
    # Submit every batch in the list to the validator.
    batch_request = client_batch_submit_pb2.ClientBatchSubmitRequest()
    batch_request.batches.extend(list(batch_list.batches))
    validator_response = await conn.send(
        validator_pb2.Message.CLIENT_BATCH_SUBMIT_REQUEST,
        batch_request.SerializeToString(),
        timeout,
    )
    client_response = client_batch_submit_pb2.ClientBatchSubmitResponse()
    client_response.ParseFromString(validator_response.content)
    status = client_response.status
    if not webhook:
        # API mode: translate validator errors into API exceptions.
        if status == client_batch_submit_pb2.ClientBatchSubmitResponse.INTERNAL_ERROR:
            raise ApiInternalError("Internal Error")
        elif status == client_batch_submit_pb2.ClientBatchSubmitResponse.INVALID_BATCH:
            raise ApiBadRequest("Invalid Batch")
        elif status == client_batch_submit_pb2.ClientBatchSubmitResponse.QUEUE_FULL:
            raise ApiInternalError("Queue Full")
    elif status != client_batch_submit_pb2.ClientBatchSubmitResponse.OK:
        # Webhook mode: fail silently on a non-OK submit.
        return None

    # Wait (up to `timeout`) for the submitted batches to commit.
    status_request = client_batch_submit_pb2.ClientBatchStatusRequest()
    status_request.batch_ids.extend(
        list(b.header_signature for b in batch_list.batches))
    status_request.wait = True
    status_request.timeout = timeout
    validator_response = await conn.send(
        validator_pb2.Message.CLIENT_BATCH_STATUS_REQUEST,
        status_request.SerializeToString(),
        timeout,
    )
    status_response = client_batch_submit_pb2.ClientBatchStatusResponse()
    status_response.ParseFromString(validator_response.content)
    status = status_response.status
    if not webhook:
        if status != client_batch_submit_pb2.ClientBatchStatusResponse.OK:
            raise ApiInternalError("Internal Error")
    elif status != client_batch_submit_pb2.ClientBatchStatusResponse.OK:
        return None

    # Inspect the first batch's individual status.
    # NOTE(review): only batch_statuses[0] is checked — presumably callers
    # submit a single batch per call; confirm against call sites.
    response = status_response.batch_statuses[0]
    status = response.status
    if not webhook:
        if status == client_batch_submit_pb2.ClientBatchStatus.INVALID:
            raise ApiBadRequest("Bad Request: {}".format(
                response.invalid_transactions[0].message))
        elif status == client_batch_submit_pb2.ClientBatchStatus.PENDING:
            raise ApiInternalError("Internal Error: Transaction timed out.")
        elif status == client_batch_submit_pb2.ClientBatchStatus.UNKNOWN:
            raise ApiInternalError("Internal Error: Unspecified error.")
    return status
async def send(conn, batch_list, timeout):
    """Submit a batch list to the validator and wait for it to commit.

    Returns the status entry of the first batch when it committed;
    raises ApiBadRequest / ApiInternalError on submission or commit
    failures.
    """
    # Submit all batches to the validator
    submit_request = client_batch_submit_pb2.ClientBatchSubmitRequest()
    submit_request.batches.extend(list(batch_list.batches))
    raw_response = await conn.send(
        validator_pb2.Message.CLIENT_BATCH_SUBMIT_REQUEST,
        submit_request.SerializeToString(),
        timeout,
    )
    submit_response = client_batch_submit_pb2.ClientBatchSubmitResponse()
    submit_response.ParseFromString(raw_response.content)

    # Translate submission failures into API errors
    submit_status = submit_response.status
    if submit_status == client_batch_submit_pb2.ClientBatchSubmitResponse.INTERNAL_ERROR:
        raise ApiInternalError("Internal Error")
    if submit_status == client_batch_submit_pb2.ClientBatchSubmitResponse.INVALID_BATCH:
        raise ApiBadRequest("Invalid Batch")
    if submit_status == client_batch_submit_pb2.ClientBatchSubmitResponse.QUEUE_FULL:
        raise ApiInternalError("Queue Full")

    # Wait (up to `timeout`) for the submitted batches to resolve
    status_request = client_batch_submit_pb2.ClientBatchStatusRequest()
    status_request.batch_ids.extend(
        [batch.header_signature for batch in batch_list.batches])
    status_request.wait = True
    status_request.timeout = timeout
    raw_response = await conn.send(
        validator_pb2.Message.CLIENT_BATCH_STATUS_REQUEST,
        status_request.SerializeToString(),
        timeout,
    )
    status_response = client_batch_submit_pb2.ClientBatchStatusResponse()
    status_response.ParseFromString(raw_response.content)

    if status_response.status != client_batch_submit_pb2.ClientBatchStatusResponse.OK:
        raise ApiInternalError("Internal Error")

    # Map the first batch's final state to a return value or error
    first_status = status_response.batch_statuses[0]
    if first_status.status == client_batch_submit_pb2.ClientBatchStatus.COMMITTED:
        return first_status
    if first_status.status == client_batch_submit_pb2.ClientBatchStatus.INVALID:
        raise ApiBadRequest("Bad Request: {}".format(
            first_status.invalid_transactions[0].message))
    if first_status.status == client_batch_submit_pb2.ClientBatchStatus.PENDING:
        raise ApiInternalError(
            "Internal Error: Transaction submitted but timed out.")
    if first_status.status == client_batch_submit_pb2.ClientBatchStatus.UNKNOWN:
        raise ApiInternalError(
            "Internal Error: Something went wrong. Try again later.")
def watch_batch(self, batch_id):
    """Block until ``batch_id`` is committed and return its status name.

    Opens a ZMQ DEALER connection to the validator, sends a
    ClientBatchStatusRequest with wait=True, and returns the resulting
    status name (e.g. 'COMMITTED').  Returns None when the response is
    of an unexpected type or carries a non-OK status.
    """
    # Setup a connection to the validator
    ctx = zmq.Context()
    socket = ctx.socket(zmq.DEALER)
    try:
        socket.connect(current_app.config['SAWTOOTH_VALIDATOR_URL'])

        # Construct the request
        request = client_batch_submit_pb2.ClientBatchStatusRequest(
            batch_ids=[batch_id], wait=True).SerializeToString()

        # Construct the message wrapper; the correlation id must be
        # unique for all in-process requests
        correlation_id = batch_id + uuid.uuid4().hex
        msg = Message(
            correlation_id=correlation_id,
            message_type=Message.CLIENT_BATCH_STATUS_REQUEST,
            content=request)

        # Send the request
        socket.send_multipart([msg.SerializeToString()])

        # Receive the response
        resp = socket.recv_multipart()[-1]

        # Parse the message wrapper
        msg = Message()
        msg.ParseFromString(resp)

        # Validate the response type
        if msg.message_type != Message.CLIENT_BATCH_STATUS_RESPONSE:
            current_app.logger.error("Unexpected response message type")
            return None

        # Parse the response
        response = client_batch_submit_pb2.ClientBatchStatusResponse()
        response.ParseFromString(msg.content)

        # Validate the response status.  BUGFIX: compare against the
        # *status* response enum, not ClientBatchSubmitResponse (the
        # original used the wrong enum class).
        if response.status != client_batch_submit_pb2.ClientBatchStatusResponse.OK:
            current_app.logger.error("watch batch status failed: {}".format(
                response.response_message))
            return None

        return client_batch_submit_pb2.ClientBatchStatus.Status.Name(
            response.batch_statuses[0].status)
    finally:
        # BUGFIX: the original closed the socket only on the success
        # path, leaking it (and never terminating the context) on every
        # early-return error path.
        socket.close()
        ctx.term()
async def check_batch_status(conn, batch_ids):
    """Wait for the given batches to commit, raising if the first did not.

    Sends a ClientBatchStatusRequest with wait=True and raises
    ApiBadRequest / ApiInternalError for INVALID, PENDING, or UNKNOWN
    outcomes; returns None on success.
    """
    request = client_batch_submit_pb2.ClientBatchStatusRequest(
        batch_ids=batch_ids, wait=True)
    raw = await conn.send(
        validator_pb2.Message.CLIENT_BATCH_STATUS_REQUEST,
        request.SerializeToString())

    response = client_batch_submit_pb2.ClientBatchStatusResponse()
    response.ParseFromString(raw.content)

    # Only the first batch's outcome is inspected
    first = response.batch_statuses[0]
    if first.status == client_batch_submit_pb2.ClientBatchStatus.INVALID:
        raise ApiBadRequest(first.invalid_transactions[0].message)
    if first.status == client_batch_submit_pb2.ClientBatchStatus.PENDING:
        raise ApiInternalError("Transaction submitted but timed out")
    if first.status == client_batch_submit_pb2.ClientBatchStatus.UNKNOWN:
        raise ApiInternalError("Something went wrong. Try again later")
def get_batch(self, batch_id):
    """Fetch the status entry for ``batch_id`` from the validator.

    Returns:
        tuple: ``({'batch_statuses': <status dict>}, sha256_hexdigest)``
        where the hash covers the serialized validator response.

    Raises:
        Exception: on ZMQ failure, timeout, unknown batch id, or a
            response whose batch id does not match the request.
    """
    self._stream.wait_for_ready()
    future = self._stream.send(
        message_type=Message.CLIENT_BATCH_STATUS_REQUEST,
        content=client_batch_submit_pb2.ClientBatchStatusRequest(
            batch_ids=[batch_id],
        ).SerializeToString())
    try:
        resp = future.result(ZMQ_CONNECTION_TIMEOUT).content
    except ValidatorConnectionError as vce:
        # Lazy %-args instead of eager string interpolation
        LOGGER.error('ZMQ error: %s', vce)
        raise Exception('Failed with ZMQ interaction: {0}'.format(vce)) from vce
    except (asyncio.TimeoutError, FutureTimeoutError):
        LOGGER.error('Task with batch_id %s timeouted', batch_id)
        raise Exception('Timeout')

    batch_resp = client_batch_submit_pb2.ClientBatchStatusResponse()
    batch_resp.ParseFromString(resp)
    LOGGER.debug('Batch: %s', resp)
    LOGGER.debug('Batch parsed: %s', repr(batch_resp).replace("\n", ""))

    hash_sum = hashlib.sha256(batch_resp.SerializeToString()).hexdigest()
    LOGGER.debug('got hashsum: %s', hash_sum)

    data = message_to_dict(batch_resp)
    LOGGER.debug('data: %s', data)
    try:
        batch_data = data['batch_statuses'][0]
    except IndexError:
        raise Exception(f'Batch with id "{batch_id}" not found')

    # BUGFIX: `assert` is stripped under `python -O`, so the mismatch
    # check silently disappeared in optimized runs; validate explicitly.
    if batch_id != batch_data['batch_id']:
        raise Exception(
            f'Batches not matched (req: {batch_id}, '
            f'got: {batch_data["batch_id"]})')

    return {'batch_statuses': batch_data}, hash_sum
def get_batch(self, batch_id):
    """Fetch the status entry for ``batch_id`` from the validator.

    Returns:
        tuple: ``({'batch_statuses': <status dict>}, sha256_hexdigest)``
        where the hash covers the serialized validator response.

    Raises:
        Exception: on ZMQ failure, timeout, unknown batch id, or a
            response whose batch id does not match the request.
    """
    self._stream.wait_for_ready()
    future = self._stream.send(
        message_type=Message.CLIENT_BATCH_STATUS_REQUEST,
        content=client_batch_submit_pb2.ClientBatchStatusRequest(
            batch_ids=[batch_id],
        ).SerializeToString())
    try:
        # NOTE(review): 10s timeout is hard-coded here while the sibling
        # implementation uses ZMQ_CONNECTION_TIMEOUT — confirm and unify.
        resp = future.result(10).content
    except ValidatorConnectionError as vce:
        # Lazy %-args instead of eager string interpolation
        LOGGER.error('ZMQ error: %s', vce)
        raise Exception('Failed with ZMQ interaction: {0}'.format(vce)) from vce
    except asyncio.TimeoutError:
        LOGGER.error('Task with batch_id %s timeouted', batch_id)
        raise Exception('Timeout')

    batch_resp = client_batch_submit_pb2.ClientBatchStatusResponse()
    batch_resp.ParseFromString(resp)
    LOGGER.debug('Batch: %s', resp)
    LOGGER.info('Batch parsed: %s', batch_resp)

    hash_sum = hashlib.sha256(batch_resp.SerializeToString()).hexdigest()

    data = message_to_dict(batch_resp)
    LOGGER.debug('data: %s', data)
    try:
        batch_data = data['batch_statuses'][0]
    except IndexError:
        raise Exception(f'Batch with id "{batch_id}" not found')

    # BUGFIX: `assert` is stripped under `python -O`, so the mismatch
    # check silently disappeared in optimized runs; validate explicitly.
    if batch_id != batch_data['batch_id']:
        raise Exception('Batches not matched (req: {0}, got: {1})'.format(
            batch_id, batch_data['batch_id']))

    return {'batch_statuses': batch_data}, hash_sum
async def _make_token_transfer(self, request, address_from, address_to,
                               num_bgt, coin_code='bgt'):
    """
    Make transfer from wallet to wallet.

    Builds and submits a smart-bgt 'transfer' transaction, then polls
    the validator until the batch status moves off PENDING.

    Returns:
        str: the final batch status (e.g. 'COMMITTED' or 'INVALID').
    """
    import asyncio  # local import: only needed for the polling delay

    payload_bytes = cbor.dumps({
        'Verb': 'transfer',
        'Name': address_from,
        'to_addr': address_to,
        'num_bgt': num_bgt,
        'group_id': coin_code,
    })
    LOGGER.debug('BgxRouteHandler: _make_token_transfer make payload=%s',
                 payload_bytes)

    # Both wallets are read and written by the transfer
    in_address = make_smart_bgt_address(address_from)
    out_address = make_smart_bgt_address(address_to)
    inputs = [in_address, out_address]
    outputs = [in_address, out_address]

    transaction = self._create_transaction(payload_bytes, inputs, outputs)
    batch = self._create_batch([transaction])
    batch_id = batch.header_signature

    # Submit the batch to the validator
    error_traps = [error_handlers.BatchInvalidTrap,
                   error_handlers.BatchQueueFullTrap]
    validator_query = client_batch_submit_pb2.ClientBatchSubmitRequest(
        batches=[batch])
    LOGGER.debug('BgxRouteHandler: _make_token_transfer send batch_id=%s',
                 batch_id)
    with self._post_batches_validator_time.time():
        await self._query_validator(
            Message.CLIENT_BATCH_SUBMIT_REQUEST,
            client_batch_submit_pb2.ClientBatchSubmitResponse,
            validator_query,
            error_traps)

    # Ask validator for batch status while it changes from PENDING
    pending_status = 'PENDING'
    status = ''
    while True:
        error_traps = [error_handlers.StatusResponseMissing]
        validator_query = client_batch_submit_pb2.ClientBatchStatusRequest(
            batch_ids=[batch_id])
        self._set_wait(request, validator_query)
        response = await self._query_validator(
            Message.CLIENT_BATCH_STATUS_REQUEST,
            client_batch_submit_pb2.ClientBatchStatusResponse,
            validator_query,
            error_traps)
        metadata = self._get_metadata(request, response)
        data = self._drop_id_prefixes(
            self._drop_empty_props(response['batch_statuses']))
        LOGGER.debug('CLIENT_BATCH_STATUS_REQUEST:metadata:%s', metadata)
        LOGGER.debug('CLIENT_BATCH_STATUS_REQUEST:data:%s', data)

        batch_status = data[0]
        if batch_status['status'] != pending_status:
            status = batch_status['status']
            break
        # BUGFIX: time.sleep(5) blocked the entire event loop inside
        # this coroutine; yield control to the loop while waiting.
        await asyncio.sleep(5)

    return status