def create_fake_letter_response_file(self, reference):
    """Upload a fake DVLA response file to S3 for the given letter reference.

    Tries up to 30 timestamped filenames (random offsets within the last
    30 seconds) to find one not already present in the response bucket.
    On development, also hits the DVLA callback endpoint manually, since
    SNS callbacks cannot be triggered locally.

    Raises:
        ValueError: if every candidate filename already exists on S3.
    """
    now = datetime.utcnow()
    dvla_response_data = '{}|Sent|0|Sorted'.format(reference)
    response_bucket = current_app.config['DVLA_RESPONSE_BUCKET_NAME']

    # Walk the 0..29-second offsets in random order and keep the first
    # timestamp whose filename is free; the for/else raises if none is.
    for offset in sorted(range(30), key=lambda _: random.random()):
        candidate = 'NOTIFY-{}-RSP.TXT'.format(
            (now - timedelta(seconds=offset)).strftime('%Y%m%d%H%M%S'))
        if not file_exists(response_bucket, candidate):
            upload_file_name = candidate
            break
    else:
        raise ValueError(
            'cant create fake letter response file for {} - too many files for that time already exist on s3'
            .format(reference))

    s3upload(filedata=dvla_response_data,
             region=current_app.config['AWS_REGION'],
             bucket_name=response_bucket,
             file_location=upload_file_name)
    current_app.logger.info(
        "Fake DVLA response file {}, content [{}], uploaded to {}, created at {}"
        .format(upload_file_name, dvla_response_data, response_bucket, now))

    # on development we can't trigger SNS callbacks so we need to manually hit the DVLA callback endpoint
    if current_app.config['NOTIFY_ENVIRONMENT'] == 'development':
        make_request('letter', 'dvla', _fake_sns_s3_callback(upload_file_name), None)
def upload_letter_pdf(notification, pdf_data, precompiled=False):
    """Upload a letter PDF to S3 and return the key it was stored under.

    Precompiled letters go to the scan bucket, letters sent with a test
    API key go to the test bucket, and everything else goes to the live
    letters PDF bucket.
    """
    current_app.logger.info(
        "PDF Letter {} reference {} created at {}, {} bytes".format(
            notification.id, notification.reference, notification.created_at,
            len(pdf_data)))

    is_test_key = notification.key_type == KEY_TYPE_TEST
    upload_file_name = generate_letter_pdf_filename(
        reference=notification.reference,
        created_at=notification.created_at,
        ignore_folder=precompiled or is_test_key,
        postage=notification.postage)

    # Pick the destination bucket from the notification's type.
    if precompiled:
        bucket_config_key = 'LETTERS_SCAN_BUCKET_NAME'
    elif is_test_key:
        bucket_config_key = 'TEST_LETTERS_BUCKET_NAME'
    else:
        bucket_config_key = 'LETTERS_PDF_BUCKET_NAME'
    bucket_name = current_app.config[bucket_config_key]

    s3upload(filedata=pdf_data,
             region=current_app.config['AWS_REGION'],
             bucket_name=bucket_name,
             file_location=upload_file_name)

    current_app.logger.info(
        "Uploaded letters PDF {} to {} for notification id {}".format(
            upload_file_name, bucket_name, notification.id))
    return upload_file_name
def create_pdf_for_templated_letter(self, encrypted_letter_data):
    """Celery task: render a templated letter to a CMYK PDF and upload it.

    Decrypts the payload, renders the letter template to PDF (retrying
    the task on Weasyprint errors), converts the PDF to CMYK, uploads it
    to the test or live letters bucket, and queues a follow-up task to
    record the letter's billable page count. S3 upload failures are
    logged and abort the task without queueing the follow-up.
    """
    letter_details = current_app.encryption_client.decrypt(encrypted_letter_data)
    current_app.logger.info(
        f"Creating a pdf for notification with id {letter_details['notification_id']}"
    )

    logo_filename = f'{letter_details["logo_filename"]}.svg' if letter_details['logo_filename'] else None
    template = LetterPrintTemplate(
        letter_details['template'],
        values=letter_details['values'] or None,
        contact_block=letter_details['letter_contact_block'],
        # letter assets are hosted on s3
        admin_base_url=current_app.config['LETTER_LOGO_URL'],
        logo_file_name=logo_filename,
    )
    with current_app.test_request_context(''):
        html = HTML(string=str(template))

    try:
        pdf = BytesIO(html.write_pdf())
    except WeasyprintError as exc:
        # retry() re-raises, so execution does not continue past here
        self.retry(exc=exc, queue=QueueNames.SANITISE_LETTERS)

    cmyk_pdf = convert_pdf_to_cmyk(pdf)
    page_count = get_page_count(cmyk_pdf.read())
    cmyk_pdf.seek(0)  # rewind after counting pages so the upload sends the whole file

    bucket_name = (
        current_app.config['TEST_LETTERS_BUCKET_NAME']
        if letter_details["key_type"] == "test"
        else current_app.config['LETTERS_PDF_BUCKET_NAME']
    )
    try:
        # If the file already exists in S3, it will be overwritten
        s3upload(
            filedata=cmyk_pdf,
            region=current_app.config['AWS_REGION'],
            bucket_name=bucket_name,
            file_location=letter_details["letter_filename"],
        )
        current_app.logger.info(
            f"Uploaded letters PDF {letter_details['letter_filename']} to {bucket_name} for "
            f"notification id {letter_details['notification_id']}")
    except BotoClientError:
        current_app.logger.exception(
            f"Error uploading {letter_details['letter_filename']} to pdf bucket "
            f"for notification {letter_details['notification_id']}")
        return

    notify_celery.send_task(name=TaskNames.UPDATE_BILLABLE_UNITS_FOR_LETTER,
                            kwargs={
                                "notification_id": letter_details["notification_id"],
                                "page_count": page_count,
                            },
                            queue=QueueNames.LETTERS)
def upload_letter_pdf(notification, pdf_data, precompiled=False):
    """Store a letter PDF in S3 and return the key it was stored under.

    Destination bucket: scan bucket for precompiled letters, test bucket
    for letters sent with a test API key, otherwise the live letters
    PDF bucket.
    """
    current_app.logger.info(
        "PDF Letter {} reference {} created at {}, {} bytes".format(
            notification.id,
            notification.reference,
            notification.created_at,
            len(pdf_data),
        )
    )

    upload_file_name = get_letter_pdf_filename(
        notification.reference,
        notification.service.crown,
        is_scan_letter=precompiled or notification.key_type == KEY_TYPE_TEST,
        postage=notification.postage,
    )

    if precompiled:
        bucket_key = "LETTERS_SCAN_BUCKET_NAME"
    elif notification.key_type == KEY_TYPE_TEST:
        bucket_key = "TEST_LETTERS_BUCKET_NAME"
    else:
        bucket_key = "LETTERS_PDF_BUCKET_NAME"
    bucket_name = current_app.config[bucket_key]

    s3upload(
        filedata=pdf_data,
        region=current_app.config["AWS_REGION"],
        bucket_name=bucket_name,
        file_location=upload_file_name,
    )

    current_app.logger.info(
        "Uploaded letters PDF {} to {} for notification id {}".format(upload_file_name, bucket_name, notification.id)
    )
    return upload_file_name
def upload_letter_pdf(notification, pdf_data, precompiled=False):
    """Upload a letter PDF to S3 with a one-week retention tag.

    Precompiled letters go to the scan bucket; all other letters go to
    the live letters PDF bucket. Returns the S3 key used.
    """
    current_app.logger.info("PDF Letter {} reference {} created at {}, {} bytes".format(
        notification.id, notification.reference, notification.created_at, len(pdf_data)))

    upload_file_name = get_letter_pdf_filename(
        notification.reference,
        notification.service.crown,
        is_scan_letter=precompiled)

    bucket_name = current_app.config['LETTERS_SCAN_BUCKET_NAME' if precompiled else 'LETTERS_PDF_BUCKET_NAME']

    s3upload(
        filedata=pdf_data,
        region=current_app.config['AWS_REGION'],
        bucket_name=bucket_name,
        file_location=upload_file_name,
        # tagged so the bucket lifecycle policy can expire the object
        tags={Retention.KEY: Retention.ONE_WEEK}
    )

    current_app.logger.info("Uploaded letters PDF {} to {} for notification id {}".format(
        upload_file_name, bucket_name, notification.id))
    return upload_file_name
def sanitise_and_upload_letter(notification_id, filename, allow_international_letters=False):
    """Download a scanned letter PDF, sanitise it, and upload the result.

    Fetches ``filename`` from the scan bucket, runs content sanitisation,
    uploads the sanitised PDF to the sanitised-letters bucket, then
    queues a task with the (encrypted) sanitisation outcome. If S3
    download/upload fails, the error is logged and the task exits
    without queueing follow-up work.
    """
    current_app.logger.info('Sanitising notification with id {}'.format(notification_id))

    try:
        pdf_content = s3download(current_app.config['LETTERS_SCAN_BUCKET_NAME'], filename).read()
        sanitisation_details = sanitise_file_contents(
            pdf_content,
            allow_international_letters=allow_international_letters,
            filename=filename)

        # Only files that have failed sanitisation have 'message' in the sanitisation_details dict
        validation_status = 'failed' if sanitisation_details.get('message') else 'passed'

        file_data = base64.b64decode(sanitisation_details['file'].encode())

        redaction_failed_message = sanitisation_details.get('redaction_failed_message')
        if redaction_failed_message:
            # Fix: include the actual filename in the log instead of the
            # literal placeholder "(unknown)".
            current_app.logger.info(f'{redaction_failed_message} for file {filename}')
            copy_redaction_failed_pdf(filename)

        # If the file already exists in S3, it will be overwritten
        s3upload(
            filedata=file_data,
            region=current_app.config['AWS_REGION'],
            bucket_name=current_app.config['SANITISED_LETTER_BUCKET_NAME'],
            file_location=filename,
        )

        # Fix: format arguments were swapped, producing e.g.
        # "Notification passed sanitisation: <uuid>".
        current_app.logger.info('Notification {} sanitisation: {}'.format(
            notification_id, validation_status))
    except BotoClientError:
        current_app.logger.exception(
            "Error downloading {} from scan bucket or uploading to sanitise bucket for notification {}"
            .format(filename, notification_id))
        return

    sanitise_data = {
        'page_count': sanitisation_details['page_count'],
        'message': sanitisation_details['message'],
        'invalid_pages': sanitisation_details['invalid_pages'],
        'validation_status': validation_status,
        'filename': filename,
        'notification_id': notification_id,
        'address': sanitisation_details['recipient_address']
    }
    encrypted_data = current_app.encryption_client.encrypt(sanitise_data)

    notify_celery.send_task(name=TaskNames.PROCESS_SANITISED_LETTER,
                            args=(encrypted_data, ),
                            queue=QueueNames.LETTERS)
def test_s3upload_save_file_to_bucket(mocker):
    """Uploaded objects are stored with AES256 server-side encryption."""
    s3_resource = mocker.patch('notifications_utils.s3.resource')

    s3upload(filedata=contents, region=region, bucket_name=bucket, file_location=location)

    put = s3_resource.return_value.Object.return_value.put
    put.assert_called_once_with(Body=contents, ServerSideEncryption='AES256')
def test_s3upload_raises_exception(app, mocker):
    """Non-404 errors from head_bucket propagate out of s3upload."""
    s3_resource = mocker.patch('notifications_utils.s3.resource')
    error_response = {'Error': {'Code': 500}}
    client_error = botocore.exceptions.ClientError(error_response, 'Bad exception')
    s3_resource.return_value.meta.client.head_bucket.side_effect = client_error

    with pytest.raises(botocore.exceptions.ClientError):
        s3upload(filedata=contents,
                 region=region,
                 bucket_name=bucket,
                 file_location="location")
def _upload_pdf_to_test_or_live_pdf_bucket(pdf_data, filename, is_test_letter):
    """Upload a letter PDF to the test or live bucket, under today's date folder."""
    config_key = 'TEST_LETTERS_BUCKET_NAME' if is_test_letter else 'LETTERS_PDF_BUCKET_NAME'
    destination_bucket = current_app.config[config_key]
    destination_key = get_folder_name(datetime.utcnow(), is_test_letter) + filename

    s3upload(filedata=pdf_data,
             region=current_app.config['AWS_REGION'],
             bucket_name=destination_bucket,
             file_location=destination_key)
def _upload_pdf_to_test_or_live_pdf_bucket(pdf_data, filename, is_test_letter, created_at):
    """Upload a letter PDF to the test or live bucket, in the folder for created_at.

    For test letters the sending-date folder logic is bypassed
    (dont_use_sending_date=True).
    """
    config_key = 'TEST_LETTERS_BUCKET_NAME' if is_test_letter else 'LETTERS_PDF_BUCKET_NAME'
    destination_bucket = current_app.config[config_key]
    destination_key = get_folder_name(created_at, dont_use_sending_date=is_test_letter) + filename

    s3upload(filedata=pdf_data,
             region=current_app.config['AWS_REGION'],
             bucket_name=destination_bucket,
             file_location=destination_key)
def test_s3upload_creates_bucket_if_bucket_does_not_exist(mocker):
    """A 404 from head_bucket makes s3upload create the bucket in the right region."""
    s3_resource = mocker.patch('notifications_utils.s3.resource')
    missing_bucket_error = botocore.exceptions.ClientError(
        {'Error': {'Code': 404}}, 'Does not exist')
    s3_resource.return_value.meta.client.head_bucket.side_effect = missing_bucket_error

    s3upload(filedata=contents, region=region, bucket_name=bucket, file_location=location)

    create_bucket = s3_resource.return_value.create_bucket
    create_bucket.assert_called_once_with(
        Bucket=bucket,
        CreateBucketConfiguration={'LocationConstraint': region})
def test_s3upload_save_file_to_bucket_with_metadata(mocker):
    """Metadata passed to s3upload is forwarded verbatim to the S3 put call."""
    s3_resource = mocker.patch('notifications_utils.s3.resource')
    expected_metadata = {'status': 'valid', 'pages': '5'}

    s3upload(filedata=contents,
             region=region,
             bucket_name=bucket,
             file_location=location,
             metadata=expected_metadata)

    put = s3_resource.return_value.Object.return_value.put
    assert put.call_args[1]['Metadata'] == expected_metadata
def test_s3upload_save_file_to_bucket_with_urlencoded_tags(mocker):
    """Tags are urlencoded into the Tagging query string on the S3 put call."""
    s3_resource = mocker.patch('notifications_utils.s3.resource')

    s3upload(
        filedata=contents,
        region=region,
        bucket_name=bucket,
        file_location=location,
        tags={
            'a': '1/2',
            'b': 'x y'
        },
    )

    # make sure tags were a urlencoded query string
    put = s3_resource.return_value.Object.return_value.put
    tagging = put.call_args[1]['Tagging']
    assert parse_qs(tagging) == {'a': ['1/2'], 'b': ['x y']}
def create_fake_letter_response_file(self, reference):
    """Upload a fake DVLA response file to S3 for the given letter reference.

    The filename is derived from the current UTC timestamp. On
    development, also hits the DVLA callback endpoint manually, since
    SNS callbacks cannot be triggered locally.
    """
    now = datetime.utcnow()
    dvla_response_data = '{}|Sent|0|Sorted'.format(reference)
    response_bucket = current_app.config['DVLA_RESPONSE_BUCKET_NAME']
    upload_file_name = 'NOTIFY-{}-RSP.TXT'.format(now.strftime('%Y%m%d%H%M%S'))

    s3upload(filedata=dvla_response_data,
             region=current_app.config['AWS_REGION'],
             bucket_name=response_bucket,
             file_location=upload_file_name)
    current_app.logger.info(
        "Fake DVLA response file {}, content [{}], uploaded to {}, created at {}"
        .format(upload_file_name, dvla_response_data, response_bucket, now))

    # on development we can't trigger SNS callbacks so we need to manually hit the DVLA callback endpoint
    if current_app.config['NOTIFY_ENVIRONMENT'] == 'development':
        make_request('letter', 'dvla', reference, _fake_sns_s3_callback(upload_file_name), None)
def new_function():
    """Return the cached S3 object if present; otherwise build, cache, and return it."""
    cache_bucket = application.config['LETTER_CACHE_BUCKET_NAME']
    # Cache hit: serve straight from S3.
    try:
        return s3download(cache_bucket, cache_key)
    except S3ObjectNotFound:
        pass
    # Cache miss: generate the data, write it through to the cache,
    # then rewind so the caller reads from the start.
    data = original_function()
    s3upload(
        data,
        application.config['AWS_REGION'],
        cache_bucket,
        cache_key,
    )
    data.seek(0)
    return data