Code example #1
def save_csv_file(file_data, file_name, metadata):
    """Given the campaign model use its ID to name the file and save the photo to S3.

    :param file_data: CSV stream data to save to S3.
    :param file_name: The file name.
    :param metadata: File metadata for WebStorage.
    """

    # file_data is an in-memory stream; getvalue() returns the full CSV payload.
    WebStorage.save(file_name, file_data.getvalue(), metadata)
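A minimal usage sketch, assuming WebStorage.init_storage() has already been called (as in the ETL example below) and that the ('category', 'file name') metadata convention from the other examples applies; the buffer contents and names here are illustrative.

import io

# Hypothetical caller: build an in-memory CSV stream and hand it to save_csv_file().
buffer = io.BytesIO()
buffer.write('id,amount\n'.encode())
buffer.write('1,25.00\n'.encode())

save_csv_file(buffer, 'donations.csv', ('Donations Export', 'donations.csv'))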
Code example #2
def save_image(file_name, file_storage):
    """Save an image to AWS S3.

    :param file_name: Something like 2.jpeg
    :param file_storage: The file storage object
    :return:
    """

    try:
        file_storage.seek(0)
        WebStorage.save(file_name, file_storage.read(),
                        ('Campaign', file_name))
    except BotoClientError:
        # Let the caller handle S3 failures; re-raise without discarding the traceback.
        raise
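A hedged sketch of how this might be called from a Flask request handler, assuming file_storage is a Werkzeug FileStorage (it exposes seek() and read(), matching the calls above) and WebStorage is already initialised; the view and form field names are made up for illustration.

from flask import request

# Hypothetical view body: store the uploaded photo under a name derived from the campaign ID.
def upload_campaign_photo(campaign_id):
    file_storage = request.files['photo']                  # assumed form field name
    extension = file_storage.filename.rsplit('.', 1)[-1]   # e.g. 'jpeg'
    save_image('{}.{}'.format(campaign_id, extension), file_storage)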
Code example #3
def process_paypal_etl(enacted_by_agent_id, reader_list, file_storage):
    """Handle the logic for PayPal ETL.

    :param enacted_by_agent_id: The administrative user ID.
    :param reader_list: The CSV validated and converted to a list of ordered dictionaries.
    :param file_storage: The file storage object for the uploaded CSV.
    :return:
    """

    data_from_models = get_data_from_models()
    ids = {
        'transaction': data_from_models['transaction_ids'],
        'unresolved_transaction':
        data_from_models['unresolved_transaction_ids']
    }

    agent_emails = get_agent_emails()

    # Collected for SQLAlchemy bulk_save_objects().
    bulk_objects = {
        'transaction': [],
        'caged_donor': [],
        'unresolved_transaction': []
    }

    # Process each row of the validated CSV.
    for row in reader_list:

        if row['transaction_id'] in data_from_models['transaction_ids']:
            # This transaction already exists in our database.
            continue

        transaction_type = row['type']

        if transaction_type in VALID_GIFT_TRANSACTION_TYPES:
            valid_paypal_transaction(row, enacted_by_agent_id, agent_emails,
                                     ids, bulk_objects)
        elif transaction_type in REFUND_PAYPAL_TRANSACTION_TYPES:
            refund_paypal_transaction(row, enacted_by_agent_id, ids,
                                      bulk_objects)
        elif transaction_type in DISPUTE_PAYPAL_TRANSACTION_TYPES:
            # Handle the PayPal disputes.
            dispute_paypal_transaction(row, enacted_by_agent_id, ids,
                                       bulk_objects)
        elif transaction_type in USELESS_PAYPAL_TRANSACTION_TYPES:
            # Skip extraneous PayPal transaction types.
            continue
        else:
            # CSV file might come with some types we don't know how to process yet.
            bulk_objects['unresolved_transaction'] += filter(
                None, [
                    generate_unresolved_transaction(
                        row, data_from_models['unresolved_transaction_ids'],
                        enacted_by_agent_id)
                ])

    # Record this import in the PayPal ETL table.
    file_storage_name = file_storage.filename
    paypal_etl = PaypalETLModel()
    paypal_etl.enacted_by_agent_id = enacted_by_agent_id
    file_info = file_storage_name.split('.')
    file_date = datetime.utcnow()
    file_name = '.'.join(file_info[:-1]) + '_' + file_date.strftime(
        '%Y-%m-%d %H:%M:%S') + '.' + file_info[-1]
    paypal_etl.file_name = file_name
    paypal_etl.date_in_utc = file_date
    database.session.add(paypal_etl)

    # Bulk save various objects.
    database.session.bulk_save_objects(bulk_objects['transaction'])
    database.session.bulk_save_objects(bulk_objects['caged_donor'])
    database.session.bulk_save_objects(bulk_objects['unresolved_transaction'])

    # Commit to the database.
    try:
        database.session.commit()
    except SQLAlchemyError:
        database.session.rollback()
        raise PayPalETLOnCommitError
    else:
        # Store the file on AWS S3 and report success.
        file_storage.seek(0)
        WebStorage.init_storage(current_app,
                                current_app.config.get('AWS_CSV_FILES_BUCKET'),
                                current_app.config.get('AWS_CSV_FILES_PATH'))
        metadata = ('Paypal ETL', file_name)
        WebStorage.save(file_name, file_storage.read(), metadata)
        return True
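A minimal driver sketch, assuming the upload arrives as a Werkzeug-style file object and that CSV validation happens before this point; run_paypal_etl is a hypothetical name. csv.DictReader yields one dictionary per row keyed by the CSV header, which is the shape the row['transaction_id'] and row['type'] lookups above expect.

import csv
import io

# Hypothetical driver: turn the uploaded CSV into the list of dictionaries that
# process_paypal_etl() iterates over, then run the ETL.
def run_paypal_etl(enacted_by_agent_id, file_storage):
    file_storage.seek(0)
    text = file_storage.read().decode('utf-8')
    reader_list = list(csv.DictReader(io.StringIO(text)))  # one dict per CSV row
    return process_paypal_etl(enacted_by_agent_id, reader_list, file_storage)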
Code example #4
def get_cron_for_csv():
    """A function to be called as a cron job to retrieve a full diump of the donate database to a CSV."""

    # Open the stream for CSV and write the header.
    output = io.BytesIO()
    output.write(','.join(HEADER).encode())
    output.write('\n'.encode())

    logging.info('')
    logging.info('1. Open the production DB connection.')

    # Fetch the database credentials from Vault.
    vault = get_vault_data(app.config['VAULT_URL'], app.config['VAULT_TOKEN'],
                           app.config['VAULT_SECRET'])

    # Create the database connection.
    dump_conn = pymysql.connect(host=app.config['DUMP_SQLALCHEMY_HOST'],
                                port=int(app.config['DUMP_SQLALCHEMY_PORT']),
                                user=vault['data']['username'],
                                passwd=vault['data']['password'],
                                db=app.config['DUMP_SQLALCHEMY_DB'])

    logging.info('2. Get the data.')

    # Get the MySQL query to extract data from the database.
    sql_query = query_transactions_for_csv()

    # Get the cursor and perform query, with the MySQL equivalent of nolock on the database.
    dump_cursor = dump_conn.cursor()
    with dump_cursor as cursor:
        cursor.execute(
            'SET SESSION TRANSACTION ISOLATION LEVEL READ UNCOMMITTED;')
        cursor.execute(sql_query)
        rows = list(cursor)
        cursor.execute(
            'SET SESSION TRANSACTION ISOLATION LEVEL REPEATABLE READ;')

    logging.info('3. Write the data.')

    # Write the query data to the output stream.
    for row in rows:
        output.write(','.join(map(str, row)).encode())
        output.write('\n'.encode())

    logging.info('4. Save the data.')

    # Save the data to AWS S3 and generate a presigned URL.
    metadata = ('Transaction Updater', FILE_NAME)
    WebStorage.save(FILE_NAME, output.getvalue(), metadata)
    url = WebStorage.generate_presigned_url(
        app.config['AWS_CSV_FILES_BUCKET'],
        app.config['AWS_CSV_FILES_PATH'] + FILE_NAME)

    # Send a notification email to the group.
    email = app.config['STATISTICS_GROUP_EMAIL']
    data = {'email': email, 'urls': url}
    ultsys_email_api_key = app.config['ULTSYS_EMAIL_API_KEY']
    ultsys_email_url = app.config['ULTSYS_EMAIL_URL']
    headers = {
        'content-type': 'application/json',
        'X-Temporary-Service-Auth': ultsys_email_api_key
    }
    requests.post(ultsys_email_url, params=data, headers=headers)

    logging.info(url)
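One detail worth flagging in the notification step: the headers declare 'content-type': 'application/json', yet params= sends the payload in the query string. If the Ultsys email endpoint actually expects a JSON body (an assumption; its contract is not shown here), the call would instead be:

# Hypothetical variant: send the payload as a JSON body to match the declared content type.
requests.post(ultsys_email_url, json=data, headers=headers)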