Example #1
import boto3
from botocore.exceptions import ClientError
import json
import logging
from datetime import datetime
from aws_lambda_powertools import Tracer, Logger

tracer = Tracer()
logger = Logger()

logger.setLevel(logging.DEBUG)


@tracer.capture_method
def prep_archive_content(item_id, response):
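    """Write the archived to-do item as a single-row CSV file under /tmp."""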
    with open('/tmp/{}.csv'.format(item_id), 'w') as archive:
        # write csv header
        archive.write(
            "Item Id,Title,Content,Created,Updated,Archived,Deleted,Complete,Archived Date\n"
        )
        item = response['Item']
        entry = "{},{},{},{},{},{},{},{},{}\n".format(
            item_id, item['title'], item['content'], item['created_date'],
            item['updated_date'], "True", item['is_deleted'], item['is_done'],
            datetime.now().strftime("%d-%m-%Y %H:%M:%S"))
        archive.write(entry)


@tracer.capture_method
def mark_item_archived(item_id, dynamodb_table):
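    """Mark the item as archived in the DynamoDB table."""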
    response = dynamodb_table.update_item(
        # The original snippet is truncated here; the arguments below are an
        # assumed completion, and the key/attribute names are illustrative only.
        Key={'item_id': item_id},
        UpdateExpression='SET is_archived = :archived',
        ExpressionAttributeValues={':archived': True},
        ReturnValues='UPDATED_NEW')
    return response

Example #2
import boto3
import json
import logging
from aws_lambda_powertools import Tracer, Logger

tracer = Tracer()
logger = Logger()

logger.setLevel(logging.INFO)


@logger.inject_lambda_context
@tracer.capture_lambda_handler
def lambda_handler(event, context):
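    """List every object key in the archive bucket and return them as a JSON body."""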
    logger.info("Event:{}".format(event))

    # boto3.resource() returns a high-level service resource, not a low-level client
    s3_resource = boto3.resource('s3')
    archive_bucket = s3_resource.Bucket('todo-list-archive-bucket-cb')

    archives = []
    for archive_item in archive_bucket.objects.all():
        archives.append(archive_item.key)
        logger.info("{}".format(archive_item))

    logger.info("Archives: {}".format(archives))

    return {'statusCode': 200, 'body': json.dumps(archives)}