Example #1
def upload_letter_to_s3(data,
                        *,
                        file_location,
                        status,
                        page_count,
                        filename,
                        message=None,
                        invalid_pages=None,
                        recipient=None):
    # urllib.parse.quote encodes the metadata as ASCII, which S3 requires.
    # LetterMetadata takes care of decoding the values again before they are shown to users.
    metadata = {
        'status': status,
        'page_count': str(page_count),
        'filename': urllib.parse.quote(filename),
    }
    if message:
        metadata['message'] = message
    if invalid_pages:
        metadata['invalid_pages'] = json.dumps(invalid_pages)
    if recipient:
        metadata['recipient'] = urllib.parse.quote(recipient)

    utils_s3upload(
        filedata=data,
        region=current_app.config['AWS_REGION'],
        bucket_name=current_app.config['TRANSIENT_UPLOADED_LETTERS'],
        file_location=file_location,
        metadata=metadata,
    )
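Every example on this page delegates the actual S3 call to utils_s3upload. Below is a minimal sketch of how such a helper could be built on boto3; the signature, defaults and boto3 usage are assumptions for illustration, not the actual notifications-utils implementation.

import boto3


def utils_s3upload(filedata, region, bucket_name, file_location,
                   content_type='binary/octet-stream', metadata=None,
                   acl=None, session=None):
    # Reuse a caller-supplied session if given, otherwise fall back to the
    # default credential chain for the requested region.
    session = session or boto3.Session(region_name=region)
    obj = session.resource('s3').Object(bucket_name, file_location)

    put_kwargs = {'Body': filedata, 'ContentType': content_type}
    if metadata:
        put_kwargs['Metadata'] = metadata
    if acl:
        put_kwargs['ACL'] = acl

    obj.put(**put_kwargs)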
Example #2
def upload_letter_to_s3(data,
                        *,
                        file_location,
                        status,
                        page_count,
                        filename,
                        message=None,
                        invalid_pages=None,
                        recipient=None):
    metadata = {
        'status': status,
        'page_count': str(page_count),
        'filename': filename,
    }
    if message:
        metadata['message'] = message
    if invalid_pages:
        metadata['invalid_pages'] = json.dumps(invalid_pages)
    if recipient:
        metadata['recipient'] = format_recipient(recipient)

    utils_s3upload(
        filedata=data,
        region=current_app.config['AWS_REGION'],
        bucket_name=current_app.config['TRANSIENT_UPLOADED_LETTERS'],
        file_location=file_location,
        metadata=metadata,
    )
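A call to either variant might look like the following; the keyword-only arguments after * force call sites to name everything, which keeps the S3 metadata explicit. The file location, status and filenames below are invented for illustration.

with open('letter.pdf', 'rb') as f:  # placeholder local file
    pdf_bytes = f.read()

upload_letter_to_s3(
    pdf_bytes,
    file_location='2021-01-01/NOTIFY.REF1234.pdf',  # made-up key
    status='pending-virus-check',                   # made-up status
    page_count=2,
    filename='my-letter.pdf',
)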
Example #3
def upload_letter_png_logo(filename, filedata, region):
    bucket_name = current_app.config['LOGO_UPLOAD_BUCKET_NAME']
    utils_s3upload(filedata=filedata,
                   region=region,
                   bucket_name=bucket_name,
                   file_location=filename,
                   content_type='image/png')
Example #4
def s3upload(service_id, filedata, region):
    upload_id = str(uuid.uuid4())
    upload_file_name = FILE_LOCATION_STRUCTURE.format(service_id, upload_id)
    utils_s3upload(filedata=filedata['data'],
                   region=region,
                   bucket_name=current_app.config['CSV_UPLOAD_BUCKET_NAME'],
                   file_location=upload_file_name)
    return upload_id
Example #5
def s3upload(service_id, filedata, region):
    upload_id = str(uuid.uuid4())
    bucket_name, file_location = get_csv_location(service_id, upload_id)
    utils_s3upload(
        filedata=filedata['data'],
        region=region,
        bucket_name=bucket_name,
        file_location=file_location,
    )
    return upload_id
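get_csv_location presumably pairs the configured bucket with a key built from the service and upload ids, much like FILE_LOCATION_STRUCTURE in the previous example. A sketch under that assumption follows; the exact format string is guessed.

from flask import current_app

# Assumed key layout; the real constant may differ.
FILE_LOCATION_STRUCTURE = 'service-{}-notify/{}.csv'


def get_csv_location(service_id, upload_id):
    # Return (bucket_name, object_key) so callers can unpack both in one go.
    return (
        current_app.config['CSV_UPLOAD_BUCKET_NAME'],
        FILE_LOCATION_STRUCTURE.format(service_id, upload_id),
    )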
Example #6
def upload_job_to_s3(service_id, file_data):
    upload_id = str(uuid.uuid4())
    bucket, location = get_job_location(service_id, upload_id)
    utils_s3upload(
        filedata=file_data,
        region=current_app.config["AWS_REGION"],
        bucket_name=bucket,
        file_location=location,
    )
    return upload_id
Example #7
def s3upload(service_id, filedata, region):
    client = clients.csv

    upload_id = str(uuid.uuid4())
    upload_file_name = FILE_LOCATION_STRUCTURE.format(service_id, upload_id)
    utils_s3upload(filedata=filedata['data'],
                   region=region,
                   bucket_name=client.bucket_name,
                   file_location=upload_file_name,
                   session=client.session)
    return upload_id
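Example #7 passes a preconfigured boto3 session along with a bucket name, which suggests clients.csv is a small holder object. One plausible shape, purely as an assumption about the project's code:

from dataclasses import dataclass

import boto3


@dataclass
class S3Client:
    # Hypothetical container; the real `clients` object may carry more state.
    bucket_name: str
    session: boto3.Session

# clients.csv could then be something like:
# csv_client = S3Client(bucket_name='csv-upload-bucket', session=boto3.Session())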
Example #8
def upload_letter_temp_logo(filename, filedata, region, user_id):
    upload_filename = LETTER_TEMP_LOGO_LOCATION.format(user_id=user_id, unique_id=str(uuid.uuid4()), filename=filename)
    bucket_name = current_app.config["LOGO_UPLOAD_BUCKET_NAME"]
    utils_s3upload(
        filedata=filedata,
        region=region,
        bucket_name=bucket_name,
        file_location=upload_filename,
        content_type="image/svg+xml",
    )

    return upload_filename
Example #9
def upload_email_logo(filename, filedata, region, user_id):
    upload_file_name = EMAIL_LOGO_LOCATION_STRUCTURE.format(
        temp=TEMP_TAG.format(user_id=user_id),
        unique_id=str(uuid.uuid4()),
        filename=filename)
    bucket_name = current_app.config['LOGO_UPLOAD_BUCKET_NAME']
    utils_s3upload(filedata=filedata,
                   region=region,
                   bucket_name=bucket_name,
                   file_location=upload_file_name,
                   content_type='image/png')

    return upload_file_name
Example #10
def zip_and_send_letter_pdfs(filenames_to_zip, upload_filename):
    folder_date = filenames_to_zip[0].split('/')[0]
    zips_sent_filename = '{}/zips_sent/{}.TXT'.format(folder_date,
                                                      upload_filename)

    current_app.logger.info(
        "Starting to zip {file_count} letter PDFs in memory from {folder} into dvla file {upload_filename}"
        .format(  # noqa
            file_count=len(filenames_to_zip),
            folder=folder_date,
            upload_filename=upload_filename))

    try:
        if file_exists_on_s3(current_app.config['LETTERS_PDF_BUCKET_NAME'],
                             zips_sent_filename):
            current_app.logger.warning(
                '{} already exists in S3, skipping DVLA upload'.format(
                    zips_sent_filename))
            return

        zip_data = get_zip_of_letter_pdfs_from_s3(filenames_to_zip)

        ftp_client.send_zip(zip_data, upload_filename)

        # Upload a record to S3 for each zip file we send to DVLA. This is just a list of letter
        # filenames, so we can match up their references with DVLA.
        utils_s3upload(
            filedata=json.dumps(filenames_to_zip).encode(),
            region=current_app.config['AWS_REGION'],
            bucket_name=current_app.config['LETTERS_PDF_BUCKET_NAME'],
            file_location=zips_sent_filename)
    except ClientError:
        current_app.logger.exception(
            'FTP app failed to download PDF from S3 bucket {}'.format(
                folder_date))
        task_name = "update-letter-notifications-to-error"
    except FtpException:
        try:
            # Check whether the file exists on the remote server with the correct size:
            # an IOError has sometimes been raised even though the files were transferred successfully.
            ftp_client.file_exists_with_correct_size(upload_filename,
                                                     len(zip_data))
            task_name = "update-letter-notifications-to-sent"
        except FtpException:
            current_app.logger.exception('FTP app failed to send api messages')
            task_name = "update-letter-notifications-to-error"
    else:
        task_name = "update-letter-notifications-to-sent"

    refs = get_notification_references_from_s3_filenames(filenames_to_zip)
    update_notifications(task_name, refs)
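The function above guards against re-sending a zip by first checking for the zips_sent marker object. A plausible file_exists_on_s3 built on a HEAD request is sketched below; the real helper may be implemented differently.

import boto3
import botocore


def file_exists_on_s3(bucket_name, key):
    s3 = boto3.client('s3')
    try:
        # HEAD is enough to test for existence without downloading the object.
        s3.head_object(Bucket=bucket_name, Key=key)
        return True
    except botocore.exceptions.ClientError as e:
        if e.response['Error']['Code'] == '404':
            return False
        raise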
Example #11
def upload_logo(filename, filedata, region, user_id):
    client = clients.logo

    upload_file_name = LOGO_LOCATION_STRUCTURE.format(
        temp=TEMP_TAG.format(user_id=user_id),
        unique_id=str(uuid.uuid4()),
        filename=filename)

    utils_s3upload(filedata=filedata,
                   region=region,
                   bucket_name=client.bucket_name,
                   file_location=upload_file_name,
                   content_type='image/png',
                   acl='public-read',
                   session=client.session)

    return upload_file_name
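Setting acl='public-read' makes the uploaded object world-readable, so the logo can be served straight from S3. A hypothetical call, with placeholder filename, region and user id:

with open('logo.png', 'rb') as f:
    logo_key = upload_logo('logo.png', f.read(), 'eu-west-1', user_id='1234')
print(logo_key)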