def redact_pii_documents_handler(event, context):
    """Redaction Lambda function handler for S3 Object Lambda.

    Downloads the requested object via the presigned URL in the event,
    redacts PII entities from its text with Comprehend, and writes the
    redacted bytes back through the S3 Object Lambda response channel.
    On any failure the registered ExceptionHandler maps the exception to
    an S3-style error response.

    :param event: S3 Object Lambda invocation event; must contain
        GET_OBJECT_CONTEXT, S3OL_CONFIGURATION and USER_REQUEST sections.
    :param context: Lambda context object (used for the remaining-time
        budget that bounds the processing task).
    """
    LOG.info('Received event with requestId: %s', event[REQUEST_ID])
    LOG.debug(f'Raw event {event}')
    InputEventValidator.validate(event)

    # PAYLOAD is the JSON blob configured on the access point; it is
    # optional, so fall back to an empty config when absent.
    invoke_args = json.loads(event[S3OL_CONFIGURATION][PAYLOAD]) if event[S3OL_CONFIGURATION][PAYLOAD] else {}
    language_code = invoke_args.get(LANGUAGE_CODE, DEFAULT_LANGUAGE_CODE)
    redaction_config = RedactionConfig(**invoke_args)

    object_get_context = event[GET_OBJECT_CONTEXT]
    s3ol_access_point = event[S3OL_CONFIGURATION][S3OL_ACCESS_POINT_ARN]
    s3 = S3Client(s3ol_access_point)
    cloud_watch = CloudWatchClient()
    comprehend = ComprehendClient(s3ol_access_point=s3ol_access_point,
                                  session_id=event[REQUEST_ID],
                                  user_agent=DEFAULT_USER_AGENT,
                                  endpoint_url=COMPREHEND_ENDPOINT_URL)
    exception_handler = ExceptionHandler(s3)

    LOG.debug("Pii Entity Types to be redacted:" + str(redaction_config.pii_entity_types))

    # Tracked via nonlocal so the finally-block metrics reflect how far
    # the time-bound task progressed before success/timeout/failure.
    processed_document = False
    document = Document('')
    try:
        def time_bound_task():
            # Runs under execute_task_with_timeout: download, redact and
            # respond within the remaining Lambda time budget.
            nonlocal processed_document
            nonlocal document
            PartialObjectRequestValidator.validate(event)
            pii_classification_segmenter = Segmenter(DOCUMENT_MAX_SIZE_CONTAINS_PII_ENTITIES)
            pii_redaction_segmenter = Segmenter(DOCUMENT_MAX_SIZE_DETECT_PII_ENTITIES)
            redactor = Redactor(redaction_config)
            time1 = time.time()
            text, http_headers, status_code = s3.download_file_from_presigned_url(object_get_context[INPUT_S3_URL],
                                                                                  event[USER_REQUEST][HEADERS])
            time2 = time.time()
            LOG.info(f"Downloaded the file in : {(time2 - time1)} seconds")
            document = redact(text, pii_classification_segmenter, pii_redaction_segmenter, redactor, comprehend,
                              redaction_config, language_code)
            processed_document = True
            time1 = time.time()
            # NOTE(review): original literal was broken across two source
            # lines; rejoined here as a single-line message.
            LOG.info(f"Pii redaction completed within {(time1 - time2)} seconds. Returning back the response to S3")
            redacted_text_bytes = document.redacted_text.encode('utf-8')
            # Content-Length must reflect the redacted (possibly shorter
            # or longer) body, not the original object size.
            http_headers[CONTENT_LENGTH] = len(redacted_text_bytes)
            s3.respond_back_with_data(redacted_text_bytes, http_headers, object_get_context[REQUEST_ROUTE],
                                      object_get_context[REQUEST_TOKEN], status_code)

        # Reserve cleanup headroom so metrics/error responses still fit
        # inside the Lambda deadline.
        execute_task_with_timeout(context.get_remaining_time_in_millis() - RESERVED_TIME_FOR_CLEANUP,
                                  time_bound_task)
    except Exception as generated_exception:
        exception_handler.handle_exception(generated_exception, object_get_context[REQUEST_ROUTE],
                                           object_get_context[REQUEST_TOKEN])
    finally:
        if PUBLISH_CLOUD_WATCH_METRICS:
            pii_entities = get_interested_pii(document, redaction_config)
            publish_metrics(cloud_watch, s3, comprehend, processed_document, len(pii_entities) > 0,
                            language_code, s3ol_access_point, pii_entities)

    LOG.info("Responded back to s3 successfully")
def test_default_exception_handler(self):
    """An unclassified Exception should map to a generic 500 InternalError."""
    mock_s3 = MagicMock()
    handler = ExceptionHandler(mock_s3)
    handler.handle_exception(Exception(), "SomeRoute", "SomeToken")
    mock_s3.respond_back_with_error.assert_called_once_with(
        S3_STATUS_CODES.INTERNAL_SERVER_ERROR_500,
        S3_ERROR_CODES.InternalError,
        "An internal error occurred while processing the file",
        "SomeRoute",
        "SomeToken",
    )
def test_file_size_limit_exceeded_handler(self):
    """Oversized objects should be rejected with 400 EntityTooLarge."""
    mock_s3 = MagicMock()
    handler = ExceptionHandler(mock_s3)
    handler.handle_exception(FileSizeLimitExceededException(), "SomeRoute", "SomeToken")
    mock_s3.respond_back_with_error.assert_called_once_with(
        S3_STATUS_CODES.BAD_REQUEST_400,
        S3_ERROR_CODES.EntityTooLarge,
        "Size of the requested object exceeds maximum file size supported",
        "SomeRoute",
        "SomeToken",
    )
def test_timeout_exception_handler(self):
    """A processing timeout should map to 400 RequestTimeout."""
    mock_s3 = MagicMock()
    handler = ExceptionHandler(mock_s3)
    handler.handle_exception(TimeoutException(), "SomeRoute", "SomeToken")
    mock_s3.respond_back_with_error.assert_called_once_with(
        S3_STATUS_CODES.BAD_REQUEST_400,
        S3_ERROR_CODES.RequestTimeout,
        "Failed to complete document processing within time limit",
        "SomeRoute",
        "SomeToken",
    )
def test_restricted_document_exception_handler(self):
    """A document flagged as containing PII should map to 403 AccessDenied."""
    mock_s3 = MagicMock()
    handler = ExceptionHandler(mock_s3)
    handler.handle_exception(RestrictedDocumentException(), "SomeRoute", "SomeToken")
    mock_s3.respond_back_with_error.assert_called_once_with(
        S3_STATUS_CODES.FORBIDDEN_403,
        S3_ERROR_CODES.AccessDenied,
        "Document Contains PII",
        "SomeRoute",
        "SomeToken",
    )
def test_s3_download_exception_handler(self):
    """S3 download failures should propagate the original error code/message."""
    mock_s3 = MagicMock()
    download_error = S3DownloadException("InternalError", "Internal Server Error")
    ExceptionHandler(mock_s3).handle_exception(download_error, "SomeRoute", "SomeToken")
    mock_s3.respond_back_with_error.assert_called_once_with(
        S3_STATUS_CODES.INTERNAL_SERVER_ERROR_500,
        S3_ERROR_CODES.InternalError,
        "Internal Server Error",
        "SomeRoute",
        "SomeToken",
    )
def test_invalid_configuration_exception_handler(self):
    """A bad Lambda configuration should map to 400 InvalidRequest."""
    mock_s3 = MagicMock()
    config_error = InvalidConfigurationException("Missconfigured knob")
    ExceptionHandler(mock_s3).handle_exception(config_error, "SomeRoute", "SomeToken")
    mock_s3.respond_back_with_error.assert_called_once_with(
        S3_STATUS_CODES.BAD_REQUEST_400,
        S3_ERROR_CODES.InvalidRequest,
        "Lambda function has been incorrectly setup",
        "SomeRoute",
        "SomeToken",
    )
def test_unsupported_file_exception_handling_return_error(self):
    """An unsupported file type should map to 400 UnexpectedContent."""
    mock_s3 = MagicMock()
    unsupported = UnsupportedFileException(file_content="SomeContent", http_headers={'h1': 'v1'})
    ExceptionHandler(mock_s3).handle_exception(unsupported, "SomeRoute", "SomeToken")
    mock_s3.respond_back_with_error.assert_called_once_with(
        S3_STATUS_CODES.BAD_REQUEST_400,
        S3_ERROR_CODES.UnexpectedContent,
        "Unsupported file encountered for determining Pii",
        "SomeRoute",
        "SomeToken",
    )
def test_unsupported_file_exception_handling_return_unknown_error(self):
    """Handling an unsupported file should re-raise when no mapping applies."""
    mock_s3 = MagicMock()
    unsupported = UnsupportedFileException(file_content="SomeContent", http_headers={'h1': 'v1'})
    with self.assertRaises(Exception):
        ExceptionHandler(mock_s3).handle_exception(unsupported, "SomeRoute", "SomeToken")
def test_unsupported_file_exception_handling_do_not_fail(self):
    """In pass-through mode the original content and headers are returned."""
    mock_s3 = MagicMock()
    unsupported = UnsupportedFileException(file_content="SomeContent", http_headers={'h1': 'v1'})
    ExceptionHandler(mock_s3).handle_exception(unsupported, "SomeRoute", "SomeToken")
    mock_s3.respond_back_with_data.assert_called_once_with(
        "SomeContent",
        {'h1': 'v1'},
        "SomeRoute",
        "SomeToken",
    )