def tracking(self, request, version, pk):
    """
    Retrieve tracking data from db
    """
    LOG.debug(f'Retrieve tracking data for a shipment {pk}.')
    log_metric('transmission.info', tags={'method': 'shipments.tracking', 'module': __name__})

    shipment = self.get_object()

    if hasattr(shipment, 'routeleg'):
        if shipment.state == TransitState.AWAITING_PICKUP:
            # RouteTrackingData may contain data for other shipments already picked up.
            # This shipment should not include those data as it has not yet begun transit.
            tracking_data = RouteTrackingData.objects.none()
        else:
            tracking_data = RouteTrackingData.objects.filter(route__id=shipment.routeleg.route.id)
    else:
        tracking_data = TrackingData.objects.filter(shipment__id=shipment.id)

    response = Template('{"data": $geojson}')
    response = response.substitute(geojson=render_filtered_point_features(shipment, tracking_data))

    return HttpResponse(content=response, content_type='application/vnd.api+json')
def shipment_quickadd_tracking_changed(sender, instance, changed_fields, **kwargs):
    if instance.quickadd_tracking and settings.SNS_CLIENT:
        response = requests.post(f'{settings.AFTERSHIP_URL}trackings',
                                 json={'tracking': {'tracking_number': instance.quickadd_tracking}},
                                 headers={'aftership-api-key': settings.AFTERSHIP_API_KEY})
        if not response.ok:
            raise ValidationError('Supplied tracking number has already been imported')

        tracking_data = response.json()['data']['tracking']

        log_metric('transmission.info', tags={'method': 'shipments.quickadd_shipment',
                                              'carrier_abbv': tracking_data['slug'],
                                              'module': __name__})

        instance.anonymous_historical_change(
            shippers_reference=f'Quickadd Shipment: {instance.quickadd_tracking}',
            carrier_abbv=tracking_data['slug'])
        instance.refresh_from_db(fields=['shippers_reference', 'carrier_abbv'])

        SNSClient().aftership_tracking_update(instance, tracking_data['id'], instance.updated_by)
def message_post_save(sender, instance, **kwargs):
    LOG.debug(f'Message post save with message {instance.id}.')
    log_metric('transmission.info', tags={'method': 'jobs.message_post_save', 'module': __name__})

    try:
        wallet_lock = cache.lock(instance.async_job.parameters['signing_wallet_id'])
        wallet_lock.local.token = instance.async_job.wallet_lock_token
        wallet_lock.release()
    except LockError:
        LOG.warning(f'Wallet {instance.async_job.parameters["signing_wallet_id"]} was not locked when '
                    f'job {instance.async_job.id} received message {instance.id}')

    if instance.type == MessageType.ERROR:
        # Generic error handling
        LOG.error(f"Transaction failure for AsyncJob {instance.async_job.id}: {instance.body}")
        instance.async_job.state = JobState.FAILED
        instance.async_job.save()

    # Update has been received, send signal to listener
    LOG.debug(f'Update has been received, and signal sent to listener {instance.id}.')
    job_update.send(sender=Shipment, message=instance, shipment=instance.async_job.shipment)
def get_queryset(self, *args, **kwargs):
    log_metric('transmission.info', tags={'method': 'transaction.get_queryset', 'module': __name__})
    LOG.debug('Getting tx details for a transaction hash.')

    queryset = super().get_queryset(*args, **kwargs)

    if settings.PROFILES_ENABLED:
        if 'wallet_id' in self.request.query_params:
            queryset = queryset.filter(
                Q(shipment__owner_id=get_owner_id(self.request)) |
                Q(shipment__shipper_wallet_id=self.request.query_params.get('wallet_id')) |
                Q(shipment__moderator_wallet_id=self.request.query_params.get('wallet_id')) |
                Q(shipment__carrier_wallet_id=self.request.query_params.get('wallet_id')))
        else:
            permission_link = self.request.query_params.get('permission_link', None)
            if permission_link:
                try:
                    permission_link_obj = PermissionLink.objects.get(pk=permission_link)
                except ObjectDoesNotExist:
                    LOG.warning(f'User: {self.request.user}, is trying to access a shipment\'s transactions '
                                f'with permission link: {permission_link}')
                    raise PermissionDenied('No permission link found.')
                queryset = queryset.filter(shipment_owner_access_filter(self.request) |
                                           Q(shipment__pk=permission_link_obj.shipment.pk))

    return queryset
def post(self, request, *args, **kwargs):
    LOG.debug(f'Performing action on shipment with id: {kwargs["shipment_pk"]}.')
    log_metric('transmission.info', tags={'method': 'shipment.action', 'module': __name__})

    shipment = Shipment.objects.get(id=kwargs['shipment_pk'])

    serializer = ShipmentActionRequestSerializer(data=request.data,
                                                 context={'shipment': shipment, 'request': request})
    serializer.is_valid(raise_exception=True)

    method = serializer.validated_data.pop('action_type')
    method.value(shipment, **serializer.validated_data)
    shipment.save()

    return Response(ShipmentSerializer(shipment, context={'request': request}).data)
def list(self, request, *args, **kwargs):
    LOG.debug(f'Listing shipment history for shipment with id: {kwargs["shipment_pk"]}.')
    log_metric('transmission.info', tags={'method': 'shipment.history', 'module': __name__})

    queryset = Shipment.history.select_related(
        'device', 'ship_from_location', 'ship_to_location',
        'final_destination_location', 'bill_to_location').filter(id=kwargs['shipment_pk'])

    serializer = ChangesDiffSerializer(queryset, request, kwargs['shipment_pk'])

    queryset = serializer.filter_queryset(serializer.historical_queryset)

    paginator = self.pagination_class()
    page = paginator.paginate_queryset(queryset, self.request, view=self)
    if page is not None:
        return paginator.get_paginated_response(serializer.get_data(page))

    return Response(serializer.get_data(queryset))
def unsubscribe(self, project, version, url=Event.get_event_subscription_url()):
    LOG.debug(f'Event unsubscription with url {url}.')
    log_metric('transmission.info', tags={'method': 'event_rpcclient.unsubscribe', 'module': __name__})

    result = self.call('event.unsubscribe', {
        "url": url,
        "project": project,
        "version": version
    })

    if 'success' in result and result['success']:
        if 'subscription' in result:
            return

    log_metric('transmission.error', tags={'method': 'event_rpcclient.unsubscribe',
                                           'code': 'RPCError', 'module': __name__})
    raise RPCError("Invalid response from Engine")
def add_document_from_s3(self, bucket, key, vault_wallet, storage_credentials, vault, document_name):
    LOG.debug(f'Telling Engine to fetch doc {document_name} from bucket {bucket} at {key} and put in vault {vault}')
    log_metric('transmission.info', tags={'method': 'document_rpcclient.add_document_from_s3',
                                          'module': __name__})

    result = self.call('vault.add_document_from_s3', {
        "bucket": bucket,
        "key": key,
        "vaultWallet": vault_wallet,
        "storageCredentials": storage_credentials,
        "vault": vault,
        "documentName": document_name,
    })

    if 'success' in result and result['success']:
        if 'vault_signed' in result:
            return result['vault_signed']

    log_metric('transmission.error', tags={'method': 'document_rpcclient.add_document_from_s3',
                                           'code': 'RPCError', 'module': __name__})
    raise RPCError("Invalid response from Engine")
def get_presigned_s3(self, obj):
    if obj.upload_status != UploadStatus.COMPLETE:
        return super().get_presigned_s3(obj)

    try:
        settings.S3_CLIENT.head_object(Bucket=self._s3_bucket, Key=obj.s3_key)
    except ClientError:
        # The document doesn't exist anymore in the bucket. The bucket is going to be repopulated from vault
        result = DocumentRPCClient().put_document_in_s3(self._s3_bucket, obj.s3_key, obj.shipper_wallet_id,
                                                        obj.storage_id, obj.vault_id, obj.filename)
        if not result:
            return None

    url = settings.S3_CLIENT.generate_presigned_url(
        'get_object',
        Params={
            'Bucket': f"{self._s3_bucket}",
            'Key': obj.s3_key
        },
        ExpiresIn=settings.S3_URL_LIFE)

    LOG.debug(f'Generated one time s3 url for: {obj.id}')
    log_metric('transmission.info', tags={'method': 'documents.generate_presigned_url', 'module': __name__})

    return url
def _sign_transaction(self, unsigned_tx):
    LOG.debug(f'Signing transaction for job {self.async_job.id}')
    log_metric('transmission.info', tags={'method': 'async_task._sign_transaction', 'module': __name__})

    from apps.eth.models import EthAction, Transaction

    signed_tx, hash_tx = getattr(self.rpc_client, 'sign_transaction')(
        self.async_job.parameters['signing_wallet_id'], unsigned_tx)
    self.async_job.parameters['signed_tx'] = signed_tx

    # Create EthAction so this Job's Listeners can also listen to Events posted for the TransactionHash
    with transaction.atomic():
        eth_action, created = EthAction.objects.get_or_create(transaction_hash=hash_tx, defaults={
            'async_job': self.async_job,
            'shipment_id': self.async_job.shipment.id
        })
        if created:
            LOG.debug(f'Created new EthAction {eth_action.transaction_hash}')
            eth_action.transaction = Transaction.from_unsigned_tx(unsigned_tx)
            eth_action.transaction.hash = hash_tx
            eth_action.transaction.save()
            eth_action.save()
        else:
            # There is already a transaction with this transaction hash - retry later (get another nonce)
            log_metric('transmission.error', tags={'method': 'async_task._sign_transaction',
                                                   'module': __name__, 'code': 'transaction_in_progress'})
            raise TransactionCollisionException(f'A transaction with the hash {hash_tx} is already in progress.')

    return signed_tx, eth_action
def update(self, request, *args, **kwargs):
    """
    Update the shipment with new details, overwriting the built-in method
    """
    partial = kwargs.pop('partial', False)
    instance = self.get_object()

    LOG.debug(f'Updating shipment {instance.id} with new details.')
    log_metric('transmission.info', tags={'method': 'shipments.update', 'module': __name__})

    serializer = self.get_serializer(instance, data=request.data, partial=partial)
    serializer.is_valid(raise_exception=True)

    shipment = self.perform_update(serializer)
    async_jobs = shipment.asyncjob_set.filter(state__in=[JobState.PENDING, JobState.RUNNING])

    response = self.get_serializer(shipment, serialization_type=SerializationType.RESPONSE)
    response.instance.async_job_id = async_jobs.latest('created_at').id if async_jobs else None

    return Response(response.data, status=status.HTTP_202_ACCEPTED)
def message(self, request, version, pk):
    LOG.debug('Jobs message called.')
    log_metric('transmission.info', tags={'method': 'jobs.message', 'module': __name__})

    serializer = MessageSerializer(data=request.data)
    serializer.is_valid(raise_exception=True)

    Message.objects.get_or_create(**serializer.data, async_job_id=pk)

    return Response(status=status.HTTP_204_NO_CONTENT)
def create(self, request, *args, **kwargs):
    """
    Creates a link to grant permission to read shipments
    """
    shipment_id = kwargs['shipment_pk']
    LOG.debug(f'Creating permission link for shipment {shipment_id}')
    log_metric('transmission.info', tags={'method': 'shipments.create_permission_link', 'module': __name__})

    create_serializer = PermissionLinkCreateSerializer(data=request.data)
    create_serializer.is_valid(raise_exception=True)

    emails = None
    if 'emails' in create_serializer.validated_data:
        emails = create_serializer.validated_data.pop('emails')

    permission_link = create_serializer.save(shipment_id=shipment_id)

    if settings.PROFILES_ENABLED and emails:
        LOG.debug(f'Emailing permission link: {permission_link.id}')

        link = f'{"https" if request.is_secure() else "http"}://{settings.FRONTEND_DOMAIN}/shipments/' \
               f'{shipment_id}/?permission_link={permission_link.id}'

        if settings.IOT_THING_INTEGRATION:
            url_client = URLShortener()
            params = {
                'long_url': link,
                'expiration_date': (permission_link.expiration_date.isoformat()
                                    if permission_link.expiration_date else None)
            }
            try:
                link = url_client.get_shortened_link(params)
            except (URLShortenerError, AWSIoTError) as exc:
                LOG.error(f'Error generating shortened link, raised error: {exc.detail}')

        email_context = {
            'username': request.user.username,
            'link': link,
            'request': request
        }

        send_templated_email('email/shipment_link.html',
                             f'{request.user.username} shared a shipment details page with you.',
                             email_context, emails)

    return Response(self.get_serializer(permission_link).data, status=status.HTTP_201_CREATED)
def _get_transaction(self):
    LOG.debug(f'Getting transaction for job {self.async_job.id}, {self.async_job.parameters["rpc_method"]}')
    log_metric('transmission.info', tags={'method': 'async_task._get_transaction', 'module': __name__})

    if self.async_job.parameters['rpc_method'] == 'create_shipment_transaction':
        contract_version, unsigned_tx = getattr(self.rpc_client, self.async_job.parameters['rpc_method'])(
            *self.async_job.parameters['rpc_parameters'])
        self.async_job.shipment.anonymous_historical_change(contract_version=contract_version)
    else:
        unsigned_tx = getattr(self.rpc_client, self.async_job.parameters['rpc_method'])(
            *self.async_job.parameters['rpc_parameters'])

    return unsigned_tx
def throttle_failure(self):
    """
    Called when a request to the API has failed due to throttling.
    """
    log_metric('transmission.info', tags={'method': 'throttling.MonthlyRateThrottle', 'module': __name__,
                                          'organization_id': self.key, 'success': False})
    return super(MonthlyRateThrottle, self).throttle_failure()
def list(self, request, *args, **kwargs):
    queryset = self.filter_queryset(self.get_queryset())
    log_metric('transmission.info', tags={'method': 'transaction.list', 'module': __name__})

    shipment_pk = kwargs.get('shipment_pk', None)
    if shipment_pk:
        LOG.debug(f'Getting transactions for shipment: {shipment_pk}.')
        queryset = queryset.filter(shipment__id=shipment_pk)
    else:
        LOG.debug('Getting tx details filtered by wallet address.')
        if not settings.PROFILES_ENABLED:
            if 'wallet_address' not in self.request.query_params:
                raise serializers.ValidationError('wallet_address required in query parameters')
            from_address = self.request.query_params.get('wallet_address')
        else:
            if 'wallet_id' not in self.request.query_params:
                raise serializers.ValidationError('wallet_id required in query parameters')
            wallet_id = self.request.query_params.get('wallet_id')

            wallet_response = settings.REQUESTS_SESSION.get(
                f'{settings.PROFILES_URL}/api/v1/wallet/{wallet_id}/?is_active',
                headers={'Authorization': f'JWT {get_jwt_from_request(request)}'})

            if wallet_response.status_code != status.HTTP_200_OK:
                raise serializers.ValidationError('Error retrieving Wallet from ShipChain Profiles')

            wallet_details = wallet_response.json()
            from_address = wallet_details['data']['attributes']['address']

        queryset = queryset.filter(transactionreceipt__from_address__iexact=from_address)

    page = self.paginate_queryset(queryset)
    if page is not None:
        serializer = self.get_serializer(page, many=True)
        return self.get_paginated_response(serializer.data)

    serializer = self.get_serializer(queryset, many=True)
    return Response(serializer.data)
def engine_subscribe_generic(project, version, events=None):
    log_metric('transmission.info', tags={'method': f'eth.engine_subscribe_{project.lower()}',
                                          'module': __name__})
    try:
        rpc_client = EventRPCClient()
        rpc_client.subscribe(project=project, version=version, events=events)
        LOG.debug(f'Subscribed to {project} events for version {version} successfully with the rpc_client.')
    except RPCError as rpc_error:
        log_metric('transmission.info', tags={'method': f'eth.engine_subscribe_{project.lower()}',
                                              'code': 'RPCError', 'module': __name__})
        LOG.error(f'Unable to subscribe to {project}, for version {version}, Events: {rpc_error}.')
        raise rpc_error
def update_shadow(self, device_id, shadow):
    LOG.debug('Updating Device Shadow in AWS IoT')
    log_metric('transmission.info', tags={'method': 'device.aws_iot.update_shadow'})

    payload = {"data": shadow}

    iot_shadow = self._put(f'device/{device_id}/config', payload)

    if 'data' not in iot_shadow:
        raise AWSIoTError("Error in response from AWS IoT")

    return iot_shadow['data']
def throttle_success(self):
    """
    Inserts the current request's timestamp along with the key into the cache.
    """
    log_metric('transmission.info', tags={'method': 'throttling.MonthlyRateThrottle', 'module': __name__,
                                          'organization_id': self.key, 'success': True})
    return super(MonthlyRateThrottle, self).throttle_success()
def _process_event(event, project):
    if project == 'LOAD':
        try:
            action = EthAction.objects.get(transaction_hash=event['transaction_hash'])
            Event.objects.get_or_create(defaults=event, eth_action=action, log_index=event['log_index'])
        except RPCError as exc:
            LOG.info(f"Engine RPC error processing event {event['transaction_hash']}: {exc}")
        except MultipleObjectsReturned as exc:
            LOG.info(f"MultipleObjectsReturned during get/get_or_create for event {event['transaction_hash']}: "
                     f"{exc}")
        except ObjectDoesNotExist:
            LOG.info(f"Non-EthAction Event processed Tx: {event['transaction_hash']}")
            log_metric('transmission.info', tags={'method': 'events.create', 'code': 'non_ethaction_event',
                                                  'module': __name__, 'project': project})
    elif project == 'ShipToken' and event['event_name'] == 'Transfer':
        LOG.info(f"ShipToken Transfer processed Tx: {event['transaction_hash']}")
        log_metric('transmission.info',
                   tags={'method': 'event.transfer', 'module': __name__, 'project': project},
                   fields={'from_address': event['return_values']['from'],
                           'to_address': event['return_values']['to'],
                           'token_amount': float(event['return_values']['value']) / (10 ** 18),
                           'count': 1})
    else:
        LOG.warning(f"Unexpected event {event} found with project: {project}")
def telemetry_data_update(self, shipment_id, payload):
    log_metric('transmission.info', tags={'method': 'shipments_tasks.telemetry_data_update', 'module': __name__})
    shipment = Shipment.objects.get(id=shipment_id)

    # TODO: return if shipment is delivered (or otherwise complete)?

    rpc_client = RPCClientFactory.get_client()
    signature = rpc_client.add_telemetry_data(shipment.storage_credentials_id, shipment.shipper_wallet_id,
                                              shipment.vault_id, payload)

    shipment.set_vault_hash(signature['hash'],
                            rate_limit=shipment.background_data_hash_interval,
                            action_type=AsyncActionType.TELEMETRY,
                            use_updated_by=False)
def send_transaction(self, signed_transaction, callback_url):
    LOG.debug('Sending transaction %s with callback_url %s.', signed_transaction, callback_url)
    log_metric('python_common.info', tags={'method': 'RPCClient.send_transaction', 'module': __name__})

    result = self.call('transaction.send', {
        "callbackUrl": callback_url,
        "txSigned": signed_transaction
    })

    if 'success' in result and result['success']:
        if 'receipt' in result:
            LOG.debug('Successful sending of transaction.')
            return result['receipt']

    log_metric('engine_rpc.error', tags={'method': 'RPCClient.send_transaction', 'module': __name__})
    raise RPCError("Invalid response from Engine")
def sign_transaction(self, wallet_id, transaction):
    LOG.debug('Signing transaction %s with wallet_id %s.', transaction, wallet_id)
    log_metric('python_common.info', tags={'method': 'RPCClient.sign_transaction', 'module': __name__})

    result = self.call('transaction.sign', {
        "signerWallet": wallet_id,
        "txUnsigned": transaction
    })

    if 'success' in result and result['success']:
        if 'transaction' in result:
            LOG.debug('Successful signing of transaction.')
            return result['transaction'], result['hash']

    log_metric('engine_rpc.error', tags={'method': 'RPCClient.sign_transaction', 'module': __name__})
    raise RPCError("Invalid response from Engine")
def async_job_fire(self):
    # Lock on Task ID to protect against tasks that are queued multiple times
    task_lock = cache.lock(self.request.id, timeout=600)

    if task_lock.acquire(blocking=False):
        try:
            async_job_id = self.request.id
            LOG.debug(f'AsyncJob {async_job_id} firing!')
            log_metric('transmission.info', tags={'method': 'async_task.async_job_fire', 'module': __name__})

            task = None
            try:
                task = AsyncTask(async_job_id)
                while cache.get("REPLICATE_SHIPMENTS_LOCK", None):
                    LOG.info("Lock for Replicating Shipments currently in use.")
                    time.sleep(15)
                task.run()
            except (WalletInUseException, TransactionCollisionException) as exc:
                LOG.info(f"AsyncJob can't be processed yet ({async_job_id}): {exc}")
                countdown = (settings.CELERY_TXHASH_RETRY if isinstance(exc, TransactionCollisionException)
                             else settings.CELERY_WALLET_RETRY)
                raise self.retry(exc=exc, countdown=countdown * random.uniform(0.5, 1.5))  # nosec #B311
            except RPCError as rpc_error:
                log_metric('transmission.error', tags={'method': 'async_job_fire', 'module': __name__,
                                                       'code': 'RPCError'})
                LOG.error(f"AsyncJob Exception ({async_job_id}): {rpc_error}")
                raise rpc_error
            except ObjectDoesNotExist as exc:
                LOG.error(f'Could not find AsyncTask ({async_job_id}): {exc}')
                raise exc
            except Exception as exc:
                LOG.error(f'Unhandled AsyncJob exception ({async_job_id}): {exc}')
                raise exc
        except Exception as exc:
            if self.request.retries >= self.max_retries and task:
                from .models import JobState
                LOG.error(f"AsyncJob ({async_job_id}) failed after max retries: {exc}")
                task.async_job.state = JobState.FAILED
                task.async_job.save()
            raise exc
        finally:
            task_lock.release()
    else:
        # Celery Task with this ID is already in progress, must have been queued multiple times.
        LOG.info(f'Disregarding Celery task {self.request.id}, it has already been locked for processing.')
def create(self, request, *args, **kwargs):
    """
    Create a pre-signed s3 post and create a corresponding document object with pending status
    """
    LOG.debug('Creating a ShipmentImport document object')
    log_metric('transmission.info', tags={'method': 'imports.create', 'module': __name__})

    serializer = ShipmentImportCreateSerializer(data=request.data,
                                                context={'auth': get_jwt_from_request(request)})
    serializer.is_valid(raise_exception=True)

    doc_obj = self.perform_create(serializer)

    return Response(self.get_serializer(doc_obj).data, status=status.HTTP_201_CREATED)
def call(self, method, args=None):
    LOG.debug('Calling RPCClient with method %s', method)
    log_metric('python_common.info', tags={'method': 'RPCClient.call', 'module': __name__})

    if args and not isinstance(args, object):
        raise RPCError("Invalid parameter type for Engine RPC call")

    self.payload['method'] = method
    self.payload['params'] = args or {}

    try:
        with TimingMetric('engine_rpc.call', tags={'method': method}) as timer:
            response = settings.REQUESTS_SESSION.post(self.url,
                                                      data=json.dumps(self.payload, cls=DecimalEncoder),
                                                      timeout=getattr(settings, 'REQUESTS_TIMEOUT', 270))

        LOG.info('rpc_client(%s) duration: %.3f', method, timer.elapsed)

        if status.is_success(response.status_code):
            response_json = response.json()
            if 'error' in response_json:
                # It's an error properly handled by engine
                log_metric('engine_rpc.error', tags={'method': method,
                                                     'code': response_json['error']['code'],
                                                     'module': __name__})
                LOG.error('rpc_client(%s) error: %s', method, response_json['error'])
                raise RPCError(response_json['error']['message'])
        else:
            # It's an unexpected error not handled by engine
            log_metric('engine_rpc.error', tags={'method': method, 'code': response.status_code,
                                                 'module': __name__})
            LOG.error('rpc_client(%s) error: %s', method, response.content)
            raise RPCError(response.content)

    except requests.exceptions.ConnectionError:
        # Don't return the true ConnectionError as it can contain internal URLs
        log_metric('engine_rpc.error', tags={'method': method, 'code': 'ConnectionError', 'module': __name__})
        raise RPCError("Service temporarily unavailable, try again later",
                       status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
                       code='service_unavailable')

    except Exception as exception:
        log_metric('engine_rpc.error', tags={'method': method, 'code': 'Exception', 'module': __name__})
        raise RPCError(str(exception))

    return response_json['result']
def create(self, request, *args, **kwargs):
    log_metric('transmission.info', tags={'method': 'events.create', 'module': __name__,
                                          'project': request.data['project']})
    LOG.debug('Events create')

    is_many = isinstance(request.data['events'], list)

    serializer = EventSerializer(data=request.data['events'], many=is_many)
    serializer.is_valid(raise_exception=True)

    events = serializer.data if is_many else [serializer.data]
    for event in events:
        self._process_event(event, request.data['project'])

    return Response(status=status.HTTP_204_NO_CONTENT)
def _send_transaction(self, signed_tx, eth_action):
    from .models import JobState
    from apps.eth.models import TransactionReceipt

    LOG.debug(f'Sending transaction for job {self.async_job.id}, hash {eth_action.transaction_hash}')
    log_metric('transmission.info', tags={'method': 'async_task._send_transaction', 'module': __name__})

    try:
        receipt = getattr(self.rpc_client, 'send_transaction')(signed_tx, self.async_job.get_callback_url())
    except RPCError as exc:
        # If Transaction submission fails, we want _sign_transaction to succeed next time
        eth_action.delete()
        raise exc

    with transaction.atomic():
        eth_action.transactionreceipt = TransactionReceipt.from_eth_receipt(receipt)
        eth_action.transactionreceipt.save()

        self.async_job.state = JobState.RUNNING
        self.async_job.save()
def set_moderator_tx(self, wallet_id, current_shipment_id, moderator_wallet):
    LOG.debug(f'Updating moderator for current_shipment_id {current_shipment_id}, '
              f'moderator {moderator_wallet}, and wallet_id {wallet_id}.')
    log_metric('transmission.info', tags={'method': 'shipment_rpcclient.set_moderator_tx', 'module': __name__})

    result = self.call('load.1.1.0.set_moderator_tx', {
        "senderWallet": wallet_id,
        "shipmentUuid": current_shipment_id,
        "moderatorWallet": moderator_wallet
    })

    if 'success' in result and result['success']:
        if 'transaction' in result and result['transaction']:
            LOG.debug('Successful update of moderator wallet transaction.')
            return result['transaction']

    log_metric('transmission.error', tags={'method': 'shipment_rpcclient.set_moderator_tx',
                                           'module': __name__, 'code': 'RPCError'})
    LOG.error('Invalid update of moderator wallet transaction.')
    raise RPCError("Invalid response from Engine")
def set_vault_hash_tx(self, wallet_id, current_shipment_id, vault_hash):
    LOG.debug(f'Updating vault hash transaction with current_shipment_id {current_shipment_id}, '
              f'vault_hash {vault_hash}, and wallet_id {wallet_id}.')
    log_metric('transmission.info', tags={'method': 'shipment_rpcclient.set_vault_hash_tx', 'module': __name__})

    result = self.call('load.1.1.0.set_vault_hash_tx', {
        "senderWallet": wallet_id,
        "shipmentUuid": current_shipment_id,
        "hash": vault_hash
    })

    if 'success' in result and result['success']:
        if 'transaction' in result and result['transaction']:
            LOG.debug('Successful update of vault hash transaction.')
            return result['transaction']

    log_metric('transmission.error', tags={'method': 'shipment_rpcclient.set_vault_hash_tx',
                                           'module': __name__, 'code': 'RPCError'})
    LOG.error('Invalid update of vault hash transaction.')
    raise RPCError("Invalid response from Engine")