def put_json_object_to_s3(data_object, s3_file_path, bucket_name):
    """Serialize *data_object* as JSON and upload it to S3.

    :param data_object: JSON-serializable object to upload.
    :param s3_file_path: Destination key within the bucket.
    :param bucket_name: Name of the target S3 bucket.
    """
    s3_resource = boto3.resource(ReferenceKeys.S3_REFERENCE)
    # Bucket(bucket_name) already scopes the call; the original also passed
    # Bucket= explicitly, which is redundant — dropped here.
    s3_resource.Bucket(bucket_name).put_object(
        Body=json.dumps(data_object), Key=s3_file_path)
    LoggerUtility.log_debug("Put object to S3 completed: " + s3_file_path)
def upload_file_object(file_path, bucket_name, object_name):
    """Stream a local file up to S3 under the given object name."""
    client = boto3.client(ReferenceKeys.S3_REFERENCE)
    # The context manager guarantees the handle closes even if upload fails.
    with open(file_path, 'rb') as source:
        client.upload_fileobj(source, bucket_name, object_name)
    LoggerUtility.log_debug("File upload to " + bucket_name + " bucket completed. File name: " + object_name)
def put_object(data_object, bucket_name, object_name):
    """Write str(data_object) to S3 as the given key."""
    client = boto3.client(ReferenceKeys.S3_REFERENCE)
    # Using put_object to send the payload directly; another option is to
    # use download file/upload file from boto3.
    payload = str(data_object).encode()
    client.put_object(Body=payload, Bucket=bucket_name, Key=object_name)
    LoggerUtility.log_debug("Put object on S3 completed: " + object_name)
def list_objects(bucket_name, object_prefix):
    """List objects under *object_prefix* inside an S3 bucket.

    :param bucket_name: Bucket to query.
    :param object_prefix: Key prefix to filter on.
    :returns: The response's ``Contents`` list, or ``[]`` when nothing
        matches.

    NOTE: ``list_objects_v2`` returns at most 1000 keys per call; callers
    needing more must paginate (not done here).
    """
    s3_client = boto3.client(ReferenceKeys.S3_REFERENCE)
    bucket_objects = s3_client.list_objects_v2(
        Bucket=bucket_name, Prefix=object_prefix)
    if ReferenceKeys.CONTENTS_REFERENCE in bucket_objects:
        contents = bucket_objects[ReferenceKeys.CONTENTS_REFERENCE]
        # BUG FIX: the original logged len() of the whole response dict
        # (its number of top-level keys), not the number of objects found.
        LoggerUtility.log_debug(
            "Found '" + str(len(contents)) + "' objects in bucket!")
        return contents
    return []
def __init__(self):
    """Class constructor.

    Reads the IoT client id, endpoint and port from the environment.
    The port falls back to a default when unset; client id and endpoint
    are required and raise KeyError when missing.
    """
    # BUG FIX: the original read
    # os.environ[EnvironmentVariables.IOT - CLIENT - ID], a subtraction of
    # three names (NameError at runtime — never the KeyError the except
    # handles). Presumably the constant is IOT_CLIENT_ID — TODO confirm
    # against EnvironmentVariables.
    client_id = os.environ[EnvironmentVariables.IOT_CLIENT_ID]
    iot_endpoint = os.environ[EnvironmentVariables.IOT_ENDPOINT]
    try:
        # Only the port lookup is defaulted; keeping the required lookups
        # above the try avoids masking a missing client id/endpoint and
        # leaving those names unbound.
        port_no = int(os.environ[EnvironmentVariables.PORT])
    except KeyError as port_not_configured:
        # Use default port if environment variable is not set
        LoggerUtility.log_warning(
            str(port_not_configured) + " not configured, using default port!")
        port_no = Constants.IOT_DEFAULT_PORT_NO
def download_file_from_s3(s3_file_path, bucket_name, local_file_path='/tmp/file1.json'):
    """Download a file from S3 to local (Lambda /tmp) storage.

    :param s3_file_path: Key of the object to download.
    :param bucket_name: Bucket holding the object.
    :param local_file_path: Destination path; defaults to the previously
        hard-coded Lambda tmp location for backward compatibility.
    :returns: The local file path on success; ``None`` (implicitly) when
        the object does not exist.
    :raises Exception: For any S3 client error other than a 404.
    """
    s3_client = boto3.client(ReferenceKeys.S3_REFERENCE)
    try:
        s3_client.download_file(bucket_name, s3_file_path, local_file_path)
        LoggerUtility.log_info("File downloaded from S3 completed: " + s3_file_path)
        return local_file_path
    except botocore.exceptions.ClientError as client_error:
        if client_error.response['Error']['Code'] == "404":
            # Missing object is logged but not fatal; caller gets None.
            LoggerUtility.log_error("The object does not exist.")
        else:
            # Original generic wrapper and message wording preserved.
            raise Exception("FATAL: " + str(client_error) + " not found!")
def __init__(self):
    """Class constructor."""
    try:
        configured_port = os.environ[
            EnvironmentVariables.ELASTICACHE_REDIS_PORT_ENV_VAR]
        port_no = int(configured_port)
    except KeyError as port_not_configured:
        # Use default port if environment variable is not set
        LoggerUtility.log_warning(
            str(port_not_configured) + " not configured, using default port!")
        port_no = Constants.REDIS_CACHE_DEFAULT_PORT_NO
    redis_host = os.environ[
        EnvironmentVariables.ELASTICACHE_REDIS_ENDPOINT_ENV_VAR]
    self.__redis_client = redis.StrictRedis(
        host=redis_host, port=port_no, db=0)
def get_data_from_rds(sql_connection, sql_query):
    """Execute *sql_query* on an RDS connection and return all fetched rows."""
    # NOTE(review): on failure the empty-dict default is returned while a
    # successful fetchall yields a sequence; callers appear to rely only on
    # truthiness/iteration — confirm before changing the default type.
    query_result = {}
    try:
        with sql_connection.cursor() as cursor:
            cursor.execute(sql_query)
            # Only for fetching all values
            query_result = cursor.fetchall()
            # Use cursor.commit() for making commiting changes to RDS
            LoggerUtility.log_info("Database transaction successful!")
    except Exception as transaction_error:
        LoggerUtility.log_error(
            "Failed to perform database transaction: " + str(transaction_error))
    return query_result
def delete_objects(bucket_name, object_list):
    """Delete the given keys from an S3 bucket.

    :param bucket_name: Bucket to delete from.
    :param object_list: Iterable of S3 keys to delete.

    NOTE: ``delete_objects`` accepts at most 1000 keys per request;
    callers with more must batch (not done here).
    """
    if not object_list:
        # BUG FIX: the original indexed object_list[0] in a log branch and
        # would raise IndexError on an empty list; nothing to delete here.
        LoggerUtility.log_debug(
            "No objects to delete from '" + bucket_name + "' bucket.")
        return
    s3_delete_object_list = [
        {ReferenceKeys.KEY_REFERENCE: s3_key} for s3_key in object_list]
    s3_client = boto3.client(ReferenceKeys.S3_REFERENCE)
    deletion_response = s3_client.delete_objects(
        Bucket=bucket_name,
        Delete={ReferenceKeys.OBJECTS_REFERENCE: s3_delete_object_list})
    if 'Deleted' in deletion_response:
        LoggerUtility.log_debug("Deleted '" + str(len(deletion_response['Deleted'])) + "' objects from '" + bucket_name + "' bucket successfully!")
    # BUG FIX: the original's else branch logged a success message about the
    # first object precisely when 'Deleted' was ABSENT from the response,
    # i.e. when nothing was deleted; that misleading branch is removed.
    if 'Errors' in deletion_response:
        LoggerUtility.log_warning("Failed to delete '" + str(len(deletion_response['Errors'])) + "' objects from '" + bucket_name + "' bucket!")
def create_connection():
    """Create a pymysql connection to the RDS instance.

    Host, user, password, database name and port come from environment
    variables; the port falls back to a default when unset.

    :returns: An open connection using ``DictCursor``, or ``None``
        (implicitly) when the connection attempt fails — the failure is
        logged.
    """
    try:
        port_no = int(os.environ[EnvironmentVariables.RDS_DB_PORT_ENV_VAR])
    except KeyError as key_error:
        # Use default port if environment variable is not set
        LoggerUtility.log_warning(
            str(key_error) + " not configured, using default port!")
        port_no = Constants.RDS_AURORA_DEFAULT_PORT_NO
    try:
        rds_connection = pymysql.connect(
            host=os.environ[EnvironmentVariables.RDS_DB_ENDPOINT_ENV_VAR],
            user=os.environ[EnvironmentVariables.RDS_DB_USER_ENV_VAR],
            passwd=os.environ[
                EnvironmentVariables.RDS_DB_PASSWORD_ENV_VAR],
            db=os.environ[EnvironmentVariables.RDS_DB_NAME_ENV_VAR],
            port=port_no,
            connect_timeout=10,
            cursorclass=pymysql.cursors.DictCursor)
        return rds_connection
    except pymysql.MySQLError as connection_error:
        # BUG FIX: pymysql.connect() failures raise OperationalError, which
        # the original InternalError filter does not cover — connection
        # errors escaped unlogged. MySQLError is the common base class.
        LoggerUtility.log_error(str(connection_error))
        LoggerUtility.log_error(
            "FATAL: Failed to create connection to RDS!")