def save_csv_file(file_data, file_name, metadata):
    """Save CSV stream data to S3 under the given file name.

    :param file_data: A BytesIO-like stream holding the CSV data to save.
    :param file_name: The file name to store the data under.
    :param metadata: File metadata tuple for WebStorage.
    :return:
    """
    # BUG FIX (docs): the previous docstring and comment were copied from the
    # campaign-photo code and described saving a photo / image content types;
    # this function only forwards CSV bytes to WebStorage.
    WebStorage.save(file_name, file_data.getvalue(), metadata)
def save_image(file_name, file_storage):
    """Save an image to AWS S3.

    :param file_name: Something like 2.jpeg
    :param file_storage: The file storage object
    :return:
    :raises BotoClientError: When the S3 save fails.
    """
    # Rewind first: callers sniff the stream (e.g. read(3) to detect an empty
    # upload) before handing it to us, so the cursor may not be at 0.
    file_storage.seek(0)
    # NOTE: the original wrapped this call in
    # `except BotoClientError as error: raise error`, which is a no-op;
    # the exception still propagates to the caller unchanged.
    WebStorage.save(file_name, file_storage.read(), ('Campaign', file_name))
def get_s3_file_list( query_terms ):
    """Get a file list from the given bucket and path given in the query terms.

    query_terms will look like:
        donation/s3/download?bucket=nusa-dev-testing&path=apeters/

    :param query_terms: See example in the URL above.
    :return: A list of files in the given bucket at the given path.
    :raises FileManagementIncompleteQueryString: When the query terms are not
        exactly { 'bucket', 'path' }.
    """
    required_keys = { 'bucket', 'path' }
    # The query string must carry exactly the required keys — no more, no less.
    if set( query_terms.keys() ) != required_keys:
        raise FileManagementIncompleteQueryString

    bucket = query_terms[ 'bucket' ][ 'eq' ]
    path = query_terms[ 'path' ][ 'eq' ]
    WebStorage.init_storage( current_app, bucket, path )
    return WebStorage.get_list_of_bucket_files()
def build_thank_you_letters_csv(thank_you_dicts):
    """Build the thank you letter CSVs.

    :param thank_you_dicts: A list of dictionaries containing transaction,
        gift and user data.
    :return: A URL to the CSV file on AWS S3, or None when there is no data.
    """
    header = [
        'DateOfGift', 'Amount', 'Account', 'UserID', 'TransactionID',
        'CheckNumber', 'Shortfirstname', 'Honorific', 'Firstname', 'Lastname',
        'Suffix', 'Address', 'City', 'State', 'Zip'
    ]
    WebStorage.init_storage(current_app,
                            current_app.config['AWS_CSV_FILES_BUCKET'],
                            current_app.config['AWS_CSV_FILES_PATH'])
    results = []
    for thank_you_dict in thank_you_dicts:
        user_address = thank_you_dict['user']['user_address']
        # BUG FIX: each row previously had 14 values against 15 header
        # columns — the Lastname value was missing, shifting Suffix, Address,
        # City, State and Zip one column to the left in the CSV.
        # NOTE(review): key name 'user_last_name' inferred from the sibling
        # keys (user_first_name / user_short_first_name) — confirm against the
        # user_address schema.
        result = [
            thank_you_dict['gift']['date_in_utc'],
            thank_you_dict['transaction']['gross_gift_amount'],
            thank_you_dict['gift']['given_to'],
            thank_you_dict['gift']['user_id'],
            thank_you_dict['gift']['transaction_id'],
            thank_you_dict['transaction']['reference_number'],
            user_address['user_short_first_name'],
            user_address['user_honorific'],
            user_address['user_first_name'],
            user_address['user_last_name'],
            user_address['user_suffix'],
            user_address['user_address'],
            user_address['user_city'],
            user_address['user_state'],
            user_address['user_zipcode']
        ]
        results.append(result)
    url = None
    if results:
        file_name = build_flat_bytesio_csv(results, header, 'thank_you_letters', True)
        url = WebStorage.generate_presigned_url(
            current_app.config['AWS_CSV_FILES_BUCKET'],
            current_app.config['AWS_CSV_FILES_PATH'] + file_name)
    return url
def generate_url( file_name ):
    """Generate a pre-signed URL to download a file from AWS S3.

    :param file_name: The name of the file on S3.
    :return: The pre-signed URL.
    """
    bucket = app.config[ 'AWS_CSV_FILES_BUCKET' ]
    key = app.config[ 'AWS_CSV_FILES_PATH' ] + file_name
    return WebStorage.generate_presigned_url( bucket, key )
def get_s3_file_path( campaign_id ):
    """Given the Campaign ID build the file path to its photo on AWS S3.

    :param campaign_id: The CampaignModel ID ( CampaignModel.id ).
    :return: File path to image.
    :raises SQLAlchemyORMNoResultFoundError: When no campaign has the given ID.
    """
    # .one() raises when the campaign does not exist; the previous
    # `except SQLAlchemyORMNoResultFoundError as error: raise error` wrapper
    # was a no-op and has been removed — the exception still propagates.
    campaign_model = CampaignModel.query.filter_by( id=campaign_id ).one()
    file_type = campaign_model.photo_type

    WebStorage.init_storage( current_app )
    s3_path = WebStorage.get_s3_path()
    return '{}{}.{}'.format( s3_path, campaign_id, file_type )
def get_s3_file( query_terms ):
    """Download a file to the user's local drive at the path given in the query parameters.

    query_terms will look like:
        donation/s3/download?bucket=nusa-dev-testing&path=apeters/&file_name=dispute.csv&local_path=/tmp/dispute.csv

    :param query_terms: See example in the URL above.
    :return: A file in the bucket downloaded to the given local path.
    :raises FileManagementIncompleteQueryString: When the query terms are not
        exactly { 'bucket', 'path', 'file_name', 'local_path' }.
    """
    required_keys = { 'bucket', 'path', 'file_name', 'local_path' }
    # The query string must carry exactly the required keys — no more, no less.
    if set( query_terms.keys() ) != required_keys:
        raise FileManagementIncompleteQueryString

    WebStorage.init_storage(
        current_app, query_terms[ 'bucket' ][ 'eq' ], query_terms[ 'path' ][ 'eq' ] )
    WebStorage.get_bucket_file(
        query_terms[ 'file_name' ][ 'eq' ], query_terms[ 'local_path' ][ 'eq' ] )
def build_campaign_model_file(file_data, campaign_model, create):
    """Given the campaign model save the photo to S3.

    For creating a campaign the payload either has a photo ( save ) or doesn't ( don't save ).
    For an update the logic is a little bit more involved:

        if campaign_model.data.photo == '' and file_storage.read( 3 ) == b'':
            There is no photo on the model and no photo to save: don't delete && don't save.
        elif campaign_model.data.photo != '' and file_storage.read( 3 ) == b'':
            There is a photo on the model and no photo to save: don't delete && don't save.
        elif file_storage.read( 3 ) != b'':
            if campaign_model.data.photo == '':
                There is no photo on the model and a photo to save: save.
            elif campaign_model.data.photo != '':
                There is a photo on the model and a photo to save: delete && save.

    :param file_data: The file storage object.
    :param campaign_model: The campaign model.
    :param create: Whether to create a new campaign or update an existing one.
    :return:
    """
    # With the campaign ID save the file to AWS and attach photo type to the model.
    # The path to the file is given by the campaign ID and the type. Something like:
    #   image_path = aws_base_url + aws_bucket + '/' + aws_path + campaign_id + '.' + file_type
    file_storage_keys = list(file_data.keys())
    if not file_storage_keys:
        # No file attached to the payload at all: nothing to do.
        return
    file_storage = file_data[file_storage_keys[0]]

    # file_storage.content_type is something like 'image/png': keep the subtype.
    file_type = file_storage.content_type[file_storage.content_type.find('/') + 1:]
    file_name = '{}.{}'.format(campaign_model.data.id, file_type)

    if create:
        if file_storage.read(3) == b'':
            # Empty upload: record that the new campaign has no photo.
            campaign_model.data.photo_type = ''
        else:
            WebStorage.init_storage(current_app,
                                    current_app.config['AWS_DEFAULT_BUCKET'],
                                    current_app.config['AWS_DEFAULT_PATH'])
            save_image(file_name, file_storage)
            campaign_model.data.photo_type = file_type
    else:
        # Updating ( the redundant `elif not create` is now a plain else ):
        # only touch storage when the payload actually carries a photo.
        if file_storage.read(3) != b'':
            WebStorage.init_storage(current_app,
                                    current_app.config['AWS_DEFAULT_BUCKET'],
                                    current_app.config['AWS_DEFAULT_PATH'])
            if campaign_model.data.photo_type != '':
                # Photo on model and photo to save: delete && save.
                file_name_to_delete = '{}.{}'.format(
                    campaign_model.data.id, campaign_model.data.photo_type)
                WebStorage.delete(file_name_to_delete)
            # save_image() rewinds the stream, so the read(3) probe above is safe.
            save_image(file_name, file_storage)
            campaign_model.data.photo_type = file_type
def process_paypal_etl(enacted_by_agent_id, reader_list, file_storage):
    """Handle the logic for PayPal ETL.

    :param enacted_by_agent_id: The administrative user ID.
    :param reader_list: The CSV validated and converted to a list of ordered dictionaries.
    :param file_storage: The uploaded CSV file storage object.
    :return: True on success.
    :raises PayPalETLOnCommitError: When the database commit fails ( session is
        rolled back first ).
    """
    # BUG FIX (docs): the docstring previously documented a non-existent
    # `file_storage_name` parameter; the actual parameter is `file_storage`.
    data_from_models = get_data_from_models()
    ids = {
        'transaction': data_from_models['transaction_ids'],
        'unresolved_transaction': data_from_models['unresolved_transaction_ids']
    }
    agent_emails = get_agent_emails()

    # Using for SQLAlchemy bulk_save_objects().
    bulk_objects = {
        'transaction': [],
        'caged_donor': [],
        'unresolved_transaction': []
    }

    # Start the loop.
    for row in reader_list:
        if row['transaction_id'] in data_from_models['transaction_ids']:
            # This transaction already exists in our database.
            continue
        transaction_type = row['type']
        if transaction_type in VALID_GIFT_TRANSACTION_TYPES:
            valid_paypal_transaction(row, enacted_by_agent_id, agent_emails, ids, bulk_objects)
        elif transaction_type in REFUND_PAYPAL_TRANSACTION_TYPES:
            refund_paypal_transaction(row, enacted_by_agent_id, ids, bulk_objects)
        elif transaction_type in DISPUTE_PAYPAL_TRANSACTION_TYPES:
            # Handle the PayPal disputes.
            dispute_paypal_transaction(row, enacted_by_agent_id, ids, bulk_objects)
        elif transaction_type in USELESS_PAYPAL_TRANSACTION_TYPES:
            # Culling extraneous transaction types from PayPal.
            continue
        else:
            # CSV file might come with some types we don't know how to process yet.
            # filter( None, ... ) drops a None result ( duplicate/unusable row ).
            bulk_objects['unresolved_transaction'] += filter(
                None,
                [
                    generate_unresolved_transaction(
                        row,
                        data_from_models['unresolved_transaction_ids'],
                        enacted_by_agent_id)
                ])

    # Store into PayPal_ETL table: tag the original file name with the upload time.
    file_storage_name = file_storage.filename
    paypal_etl = PaypalETLModel()
    paypal_etl.enacted_by_agent_id = enacted_by_agent_id
    file_info = file_storage_name.split('.')
    file_date = datetime.utcnow()
    file_name = '.'.join(file_info[:-1]) + '_' + file_date.strftime(
        '%Y-%m-%d %H:%M:%S') + '.' + file_info[-1]
    paypal_etl.file_name = file_name
    paypal_etl.date_in_utc = file_date
    database.session.add(paypal_etl)

    # Bulk save various objects.
    database.session.bulk_save_objects(bulk_objects['transaction'])
    database.session.bulk_save_objects(bulk_objects['caged_donor'])
    database.session.bulk_save_objects(bulk_objects['unresolved_transaction'])

    # Commit to the database.
    try:
        database.session.commit()
    except SQLAlchemyError:
        database.session.rollback()
        raise PayPalETLOnCommitError
    else:
        # Store file on AWS S3 only after a successful commit.
        file_storage.seek(0)
        WebStorage.init_storage(current_app,
                                current_app.config.get('AWS_CSV_FILES_BUCKET'),
                                current_app.config.get('AWS_CSV_FILES_PATH'))
        metadata = ('Paypal ETL', file_name)
        WebStorage.save(file_name, file_storage.read(), metadata)
    return True
def get_transactions_for_csv( query_terms ):
    """Query all transactions, save them into a .csv file, and put the file on S3.

    :param query_terms: Optional filter terms applied to the gift/transaction query.
    :return: A signed URL that users can download from S3.
    """
    sourced_from_agent = aliased( AgentModel )
    enacted_by_agent = aliased( AgentModel )
    method_used = aliased( MethodUsedModel )

    # NOTE: the previous `except SQLAlchemyError as error: raise error` wrapper
    # around this section was a no-op and has been removed; database errors
    # still propagate to the caller unchanged.
    query = database.session.query(
        GiftModel.id,
        GiftModel.searchable_id,
        GiftModel.user_id,
        GiftModel.method_used_id,
        method_used.name,
        GiftModel.given_to,
        GiftModel.recurring_subscription_id,
        TransactionModel.date_in_utc,
        TransactionModel.receipt_sent_in_utc,
        enacted_by_agent.id,
        enacted_by_agent.name,
        TransactionModel.type,
        TransactionModel.status,
        TransactionModel.reference_number,
        TransactionModel.gross_gift_amount,
        TransactionModel.fee,
        TransactionModel.notes ). \
        join( TransactionModel, GiftModel.id == TransactionModel.gift_id ). \
        join( sourced_from_agent, GiftModel.sourced_from_agent_id == sourced_from_agent.id ). \
        join( enacted_by_agent, TransactionModel.enacted_by_agent_id == enacted_by_agent.id ). \
        join( method_used, GiftModel.method_used_id == method_used.id )

    if query_terms:
        filters = build_filters( query_terms )
        if 'gift' in filters and filters[ 'gift' ]:
            query = query_set( GiftModel, query, filters[ 'gift' ] )
        if 'transaction' in filters and filters[ 'transaction' ]:
            query = query_set( TransactionModel, query, filters[ 'transaction' ] )

    results = query.all()

    header = [
        'gift_id', 'searchable_gift_id', 'user_id', 'method_used_id', 'method_used_name',
        'given_to', 'recurring_subscription_id', 'transaction_date_in_utc', 'receipt_sent_in_utc',
        'transaction_agent_id', 'transaction_agent_name', 'transaction_type', 'transaction_status',
        'reference_number', 'transaction_gross_amount', 'transaction_fee', 'transaction_notes'
    ]

    WebStorage.init_storage(
        current_app,
        current_app.config[ 'AWS_CSV_FILES_BUCKET' ],
        current_app.config[ 'AWS_CSV_FILES_PATH' ] )

    file_name = build_flat_bytesio_csv( results, header, 'transactions', True )
    url = WebStorage.generate_presigned_url(
        current_app.config[ 'AWS_CSV_FILES_BUCKET' ],
        current_app.config[ 'AWS_CSV_FILES_PATH' ] + file_name )
    return url
def get_cron_for_csv():
    """A function to be called as a cron job to retrieve a full dump of the
    donate database to a CSV, save it to S3, and email a download link.

    :return:
    """
    # Open the stream for CSV and write the header.
    output = io.BytesIO()
    output.write(','.join(HEADER).encode())
    output.write('\n'.encode())

    logging.info('')
    logging.info('1. Open the production DB connection.')

    # Handle vault tokens.
    vault = get_vault_data(app.config['VAULT_URL'],
                           app.config['VAULT_TOKEN'],
                           app.config['VAULT_SECRET'])

    # Create the database connection.
    dump_conn = pymysql.connect(host=app.config['DUMP_SQLALCHEMY_HOST'],
                                port=int(app.config['DUMP_SQLALCHEMY_PORT']),
                                user=vault['data']['username'],
                                passwd=vault['data']['password'],
                                db=app.config['DUMP_SQLALCHEMY_DB'])
    try:
        logging.info('2. Get the data.')

        # Get the MySQL query to extract data from the database.
        sql_query = query_transactions_for_csv()

        # Get the cursor and perform query, with the MySQL equivalent of
        # nolock ( READ UNCOMMITTED ) on the database.
        with dump_conn.cursor() as cursor:
            cursor.execute(
                'SET SESSION TRANSACTION ISOLATION LEVEL READ UNCOMMITTED;')
            cursor.execute(sql_query)
            rows = list(cursor)
            cursor.execute(
                'SET SESSION TRANSACTION ISOLATION LEVEL REPEATABLE READ;')
    finally:
        # BUG FIX: the connection was previously never closed, leaking one
        # MySQL connection per cron run.
        dump_conn.close()

    logging.info('3. Write the data.')

    # Write the query data to the output stream.
    # NOTE(review): a plain ','.join does not quote embedded commas or
    # newlines — if notes fields can contain them, switch to the csv module.
    for row in rows:
        output.write(','.join(map(str, row)).encode())
        output.write('\n'.encode())

    logging.info('4. Save the data.')

    # Save the data to AWS S3 and get the URL.
    metadata = ('Transaction Updater', FILE_NAME)
    WebStorage.save(FILE_NAME, output.getvalue(), metadata)
    url = WebStorage.generate_presigned_url(
        app.config['AWS_CSV_FILES_BUCKET'],
        app.config['AWS_CSV_FILES_PATH'] + FILE_NAME)

    # Send a notification email to the group.
    email = app.config['STATISTICS_GROUP_EMAIL']
    data = {'email': email, 'urls': url}
    ultsys_email_api_key = app.config['ULTSYS_EMAIL_API_KEY']
    ultsys_email_url = app.config['ULTSYS_EMAIL_URL']
    headers = {
        'content-type': 'application/json',
        'X-Temporary-Service-Auth': ultsys_email_api_key
    }
    # NOTE(review): the payload is sent as query params despite the JSON
    # content-type header — confirm the email service expects params, not a body.
    requests.post(ultsys_email_url, params=data, headers=headers)
    logging.info(url)
import os
from datetime import datetime

import requests

from s3_web_storage.web_storage import WebStorage

from application.app import create_app
from application.helpers.general_helper_functions import get_vault_data
from application.helpers.sql_queries import query_transactions_for_csv

# pylint: disable=bare-except
# pylint: disable=no-member
# pylint: disable=invalid-name

# BUG FIX: `os` and `datetime` are used below but were not imported in this
# block — added above ( harmless if also imported elsewhere in the file ).

# Check for how the application is being run and use that.
# The environment variable is set in the Dockerfile.
app_config_env = os.environ['APP_ENV']  # pylint: disable=invalid-name

app = create_app(app_config_env)  # pylint: disable=C0103

WebStorage.init_storage(app,
                        app.config['AWS_CSV_FILES_BUCKET'],
                        app.config['AWS_CSV_FILES_PATH'])

# Column header for the full database dump CSV.
# NOTE(review): 'gift_id' appears twice — confirm whether the second entry
# should be a different column ( e.g. a transaction id ).
HEADER = [
    'gift_id', 'method_used', 'given_to', 'given_by_user_id',
    'originating_agent_name', 'originating_agent_id', 'searchable_gift_id',
    'gift_id', 'reference_number', 'transaction_agent_name',
    'transaction_agent_id', 'transaction_type', 'transaction_status',
    'transaction_date', 'transaction_gross', 'transaction_fee', 'notes'
]

FILE_TYPE = 'csv'
FILE_PREFIX = 'full_database_dump_at'
FILE_DATETIME = datetime.now().strftime('%Y_%m_%d')
FILE_NAME = '{}_{}.{}'.format(FILE_PREFIX, FILE_DATETIME, FILE_TYPE)
def create_app(app_config_env=None):
    """Application factory.

    Allows the application to be instantiated with a specific configuration,
    e.g. configurations for development, testing, and production. Implements a
    configuration loader to augment the Flask app.config() in loading these
    configurations. Supports YAML and tagged environment variables. Manages
    the application logging level.

    :param str app_config_env: The configuration name to use in loading the
        configuration variables.
    :return: The Flask application.
    """
    # Set the ENV variable in the Dockerfile. If we can't find a value set the
    # app_config_env to DEFAULT.
    if not app_config_env:
        if 'APP_ENV' in os.environ:
            app_config_env = os.environ['APP_ENV']
        else:
            app_config_env = 'DEFAULT'

    # See if the default logging should be set to DEBUG instead of WARNING.
    # NOTE(review): override_logging is read from the environment but never
    # used below — confirm whether it should feed the log-level selection.
    if 'OVERRIDE_LOGGING' in os.environ:
        override_logging = os.environ['OVERRIDE_LOGGING']
    else:
        override_logging = False

    app = Flask('donate_api')

    # Load YAML configuration plus tagged environment variables.
    conf_root = os.path.join(os.path.dirname(__file__), '..', 'configuration')
    importlib.import_module('configuration')
    configuration_module = importlib.import_module('.config_loader',
                                                   package='configuration')
    configuration = configuration_module.ConfigLoader()
    configuration.update_from_yaml_file(os.path.join(conf_root, 'conf.yml'),
                                        app_config_env)
    configuration.update_from_env_variables(app_config_env)
    app.config.update(configuration)
    app.config.update({'ENV': app_config_env})

    wsgi_log_level = 'WARNING'
    gunicorn_log_level = 'WARNING'

    # Set the level of the root logger.
    if 'WSGI_LOG_LEVEL' in app.config and app.config['WSGI_LOG_LEVEL'] != '':
        wsgi_log_level = app.config['WSGI_LOG_LEVEL']
    if 'GUNICORN_LOG_LEVEL' in app.config and app.config[
            'GUNICORN_LOG_LEVEL'] != '':
        gunicorn_log_level = app.config['GUNICORN_LOG_LEVEL']

    # If running gunicorn add gunicorn.error to handlers.
    gunicorn = False
    if __name__ != '__main__':
        gunicorn = True

    dictConfig(
        get_logging_configuration(wsgi_log_level, gunicorn_log_level,
                                  gunicorn))

    logging.root.log(logging.root.level,
                     '***** Logging is enabled for this level.')
    logging.root.log(logging.root.level,
                     '***** app.config[ SQLALCHEMY_DATABASE_URI ]: %s',
                     app.config['SQLALCHEMY_DATABASE_URI'])
    logging.root.log(logging.root.level,
                     '***** app.config[ MYSQL_DATABASE ] : %s',
                     app.config['MYSQL_DATABASE'])

    database.init_app(app)
    redis_queue.init_app(app)
    jwt.init_app(app)

    # Absolutely needed for JWT errors to work correctly in production
    app.config.update(PROPAGATE_EXCEPTIONS=True)

    if 'INITIALIZE_WEB_STORAGE' in app.config and app.config[
            'INITIALIZE_WEB_STORAGE']:
        WebStorage.init_storage(app, app.config['AWS_DEFAULT_BUCKET'],
                                app.config['AWS_DEFAULT_PATH'])

    api = Api(app)

    # Register all REST resources.
    api.add_resource(Agents, '/donation/agents')
    api.add_resource(DashboardData, '/donation/dashboard/<string:data_type>')
    api.add_resource(DonateGetToken, '/donation/braintree/get-token')
    api.add_resource(Donors, '/donation/donors/<string:donor_type>')
    api.add_resource(CageDonorAsUltsysUser, '/donation/cage')
    api.add_resource(CageDonorUpdate, '/donation/cage/update')
    api.add_resource(CampaignsByActive,
                     '/donation/campaigns/active/<int:zero_or_one>')
    api.add_resource(CampaignsByDefault,
                     '/donation/campaigns/default/<int:zero_or_one>')
    api.add_resource(GetCampaignById, '/donation/campaigns/<int:campaign_id>')
    api.add_resource(ManageCampaigns, '/donation/campaigns')
    api.add_resource(AmountsByCampaignId,
                     '/donation/campaigns/<int:campaign_id>/amounts')
    api.add_resource(Donation, '/donation/donate')
    api.add_resource(
        Enumeration, '/donation/enumeration/<string:model>/<string:attribute>')
    api.add_resource(
        GiftsByPartialSearchableId,
        '/donation/gifts/uuid_prefix/<string:searchable_id_prefix>')
    api.add_resource(GiftByUserId, '/donation/gift/user/<int:user_id>',
                     '/donation/gift/user')
    api.add_resource(Gifts, '/donation/gifts')
    api.add_resource(GiftsByDate, '/donation/gifts/date')
    api.add_resource(GiftsByGivenTo, '/donation/gifts/given-to')
    api.add_resource(GiftUpdateNote,
                     '/donation/gift/<string:searchable_id>/notes')
    api.add_resource(GiftsThankYouLetter, '/donation/gifts/not-yet-thanked')
    api.add_resource(GiftsSendThankYouLetter,
                     '/donation/gifts/send-thank-you-letters')
    api.add_resource(TransactionsByGift,
                     '/donation/gifts/<string:searchable_id>/transactions')
    api.add_resource(TransactionBuild, '/donation/gift/transaction')
    api.add_resource(TransactionsByGifts, '/donation/gifts/transactions')
    api.add_resource(Heartbeat, '/donation/heartbeat')
    api.add_resource(DonateAdminCorrection, '/donation/correction')
    api.add_resource(DonateAdminRecordBouncedCheck,
                     '/donation/record-bounced-check')
    api.add_resource(DonateAdminRefund, '/donation/refund')
    api.add_resource(DonateReprocessQueuedDonors,
                     '/donation/reprocess-queued-donors')
    api.add_resource(GetS3File, '/donation/s3/csv/download')
    api.add_resource(GetS3FileList, '/donation/s3/csv/files')
    api.add_resource(GetS3FilePath,
                     '/donation/s3/campaign/<int:campaign_id>/file-path')
    api.add_resource(GetBraintreeSaleStatus,
                     '/donation/transaction/status/<int:transaction_id>')
    api.add_resource(TransactionsByIds, '/donation/transactions')
    api.add_resource(TransactionsById,
                     '/donation/transactions/<int:transaction_id>')
    api.add_resource(TransactionsByGrossGiftAmount,
                     '/donation/transactions/gross-gift-amount')
    api.add_resource(TransactionsForCSV, '/donation/transactions/csv')
    api.add_resource(UltsysUser, '/donation/user')
    api.add_resource(DonateAdminVoid, '/donation/void')
    api.add_resource(BraintreeWebhookSubscription,
                     '/donation/webhook/braintree/subscription')
    api.add_resource(PaypalETL, '/donation/paypal-etl')

    @app.after_request
    def after_request(response):  # pylint: disable=unused-variable
        """A handler for defining response headers.

        :param response: an HTTP response object
        :return:
        """
        response.headers.add('Access-Control-Allow-Origin', '*')
        response.headers.add('Access-Control-Allow-Headers',
                             'Content-Type, Authorization')
        response.headers.add('Access-Control-Allow-Methods',
                             'GET, PUT, POST, DELETE')
        response.headers.add('Access-Control-Expose-Headers', 'Link')
        return response

    @app.errorhandler(UltsysUserBadRequestError)
    def handle_400(error):  # pylint: disable=unused-variable
        """HTTP status 400 ( bad request ) error handler.

        :param error: Error message raised by exception.
        :return:
        """
        response = jsonify(handle_error_message(error))
        response.status_code = 400
        return response

    @app.errorhandler(BraintreeInvalidSignatureError)
    def handle_401(error):  # pylint: disable=unused-variable
        """HTTP status 401 ( unauthorized ) error handler.

        :param error: Error message raised by exception.
        :return:
        """
        response = jsonify(handle_error_message(error))
        response.status_code = 401
        return response

    @app.errorhandler(AdminTransactionModelPathError)
    @app.errorhandler(BraintreeNotFoundError)
    @app.errorhandler(SQLAlchemyORMNoResultFoundError)
    @app.errorhandler(UltsysUserNotFoundError)
    @app.errorhandler(ModelGiftNotFoundError)
    @app.errorhandler(ModelCagedDonorNotFoundError)
    @app.errorhandler(ModelTransactionNotFoundError)
    @app.errorhandler(AttributeError)
    @app.errorhandler(KeyError)
    def handle_404(error):  # pylint: disable=unused-variable
        """HTTP status 404 ( not found ) error handler.

        :param error: Error message raised by exception.
        :return:
        """
        response = jsonify(handle_error_message(error))
        response.status_code = 404
        return response

    @app.errorhandler(BraintreeNotInSettlingOrSettledError)
    @app.errorhandler(BraintreeNotInSubmittedForSettlementError)
    @app.errorhandler(BraintreeNotIsSuccessError)
    @app.errorhandler(BraintreeRefundWithNegativeAmountError)
    @app.errorhandler(ModelCampaignImproperFieldError)
    @app.errorhandler(ModelGiftImproperFieldError)
    @app.errorhandler(ModelTransactionImproperFieldError)
    @app.errorhandler(FileManagementIncompleteQueryString)
    @app.errorhandler(UUIDLessThanFiveCharsError)
    @app.errorhandler(UltsysUserMultipleFoundError)
    @app.errorhandler(JWTRequestError)
    def handle_422(error):  # pylint: disable=unused-variable
        """HTTP status 422 ( unprocessable entity ) error handler.

        :param error: Error message raised by exception.
        :return:
        """
        response = jsonify(handle_error_message(error))
        response.status_code = 422
        return response

    @app.errorhandler(AdminBuildModelsPathError)
    @app.errorhandler(BraintreeAttributeError)
    @app.errorhandler(MarshmallowValidationError)
    @app.errorhandler(SQLAlchemyError)
    @app.errorhandler(UltsysUserHTTPStatusCodeError)
    @app.errorhandler(UltsysUserInternalServerError)
    @app.errorhandler(ValueError)
    @app.errorhandler(BotoClientError)
    @app.errorhandler(QueryStringImproperError)
    @app.errorhandler(EmailHTTPStatusError)
    @app.errorhandler(CampaignIsDefaultError)
    def handle_500(error):  # pylint: disable=unused-variable
        """HTTP status 500 ( internal server error ) error handler.

        :param error: Error message raised by exception.
        :return:
        """
        response = jsonify(handle_error_message(error))
        response.status_code = 500
        return response

    def handle_error_message(error):
        """Used by error handlers for handling error and error.message.

        :param error: The error raised by the exception.
        :return: return the error message.
        """
        if hasattr(error, 'message'):
            logging.exception(error.message)
            return error.message
        # BUG FIX: check for the BotoCore ClientError 'response' attribute
        # BEFORE 'args' — every exception instance has 'args', so the S3
        # branch was previously unreachable and ClientErrors were reported
        # via their generic args instead of the structured Error payload.
        if hasattr(error, 'response'):
            # This is a BotoCore Client Error ( AWS S3 ).
            logging.exception(error.response['Error'])
            return error.response['Error']
        if hasattr(error, 'args'):
            logging.exception(error.args)
            return error.args
        logging.exception(error)
        return error

    return app