import json
from datetime import datetime

# FileModel, LOGGER, S3_CLIENT, get_s3_file_type and runtime_context are
# assumed to be defined or imported earlier in the module; they are not
# shown in this excerpt.


def store(event, context):
    """store files"""
    # reject the request when files are configured to be stored on upload
    if runtime_context.STORE:
        return {
            'statusCode': 400,
            'body': json.dumps({
                'message': 'Configured to store files at the time of upload.'
            })
        }

    # get the list of files from the request
    file_ids = json.loads(event.get('body'))
    # limit the number of files to store
    file_ids = file_ids[:runtime_context.QUERY_LIMIT]

    stored_file_ids = []
    dt = datetime.utcnow()
    for file_id in file_ids:
        FileModel.update({
            'id': file_id,
            'stored_at': dt,
        })
        LOGGER.debug(
            'Files item updated (stored). service=ddb method=update_item id={}'
            .format(file_id))
        stored_file_ids.append(file_id)

    return {"statusCode": 200, "body": json.dumps(stored_file_ids)}
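
# FileModel is the project's DynamoDB model and is not shown in this
# excerpt. A minimal sketch of what its update() might do, assuming a
# low-level boto3 client and a hypothetical FILES_TABLE_NAME setting
# (the real model may differ):
#
#   import boto3
#
#   DDB_CLIENT = boto3.client('dynamodb')
#
#   def _attr(value):
#       """Map a Python value to a DynamoDB attribute value."""
#       if isinstance(value, datetime):
#           return {'S': value.isoformat()}
#       if isinstance(value, int):
#           return {'N': str(value)}
#       return {'S': str(value)}
#
#   def update(item):
#       """Upsert a Files item; each non-key attribute becomes a SET clause."""
#       item = dict(item)
#       key = {'id': {'S': item.pop('id')}}
#       DDB_CLIENT.update_item(
#           TableName=FILES_TABLE_NAME,
#           Key=key,
#           UpdateExpression='SET ' + ', '.join(
#               '#{0} = :{0}'.format(k) for k in item),
#           ExpressionAttributeNames={'#' + k: k for k in item},
#           ExpressionAttributeValues={
#               ':' + k: _attr(v) for k, v in item.items()})
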
def delete(event, context):
    """delete files"""
    file_ids = json.loads(event.get('body'))

    deleted_file_ids = []
    for file_id in file_ids:
        # NOTE: there is no check if file has already been deleted
        FileModel.update({'id': file_id, 'deleted_at': datetime.utcnow()})
        LOGGER.debug(
            'Files item updated (deleted). service=ddb method=update_item id={}'
            .format(file_id))
        S3_CLIENT.delete_object(Bucket=runtime_context.BUCKET_NAME,
                                Key=file_id)
        LOGGER.debug(
            'S3 object deleted. service=s3 method=delete_object id={}'.format(
                file_id))
        deleted_file_ids.append(file_id)

    return {"statusCode": 200, "body": json.dumps(deleted_file_ids)}
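
# The request body is expected to be a JSON array of file ids, e.g.
# event = {'body': '["<file-id-1>", "<file-id-2>"]'} (ids illustrative).
# Re-deleting is harmless at the S3 layer: delete_object succeeds even when
# the key is already gone (in an unversioned bucket), and the DynamoDB
# item's deleted_at is simply overwritten.
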
def expire(event, context):
    """remove files that are uploaded, not stored, and older than the
    expiration time

    scheduled event
    """
    # scan the database for expired files
    expiry_at = datetime.utcnow() - runtime_context.NONSTORED_TIMEOUT
    files = FileModel.list_expired(expiry_at)

    # for each expired file, mark the item deleted and remove the S3 object
    for file in files:
        file_id = file['id']['S']
        FileModel.update({'id': file_id, 'deleted_at': datetime.utcnow()})
        LOGGER.debug(
            'Files item updated (expired). service=ddb method=update_item id={}'
            .format(file_id))
        S3_CLIENT.delete_object(Bucket=runtime_context.BUCKET_NAME,
                                Key=file_id)
        LOGGER.debug(
            'S3 object deleted. service=s3 method=delete_object id={}'.format(
                file_id))
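
# FileModel.list_expired() is also defined elsewhere. One possible
# implementation, sketched with the hypothetical DDB_CLIENT and
# FILES_TABLE_NAME from the sketch above (the real model may paginate the
# scan or use an index instead):
#
#   def list_expired(expiry_at):
#       """Items uploaded before expiry_at, never stored and never deleted."""
#       response = DDB_CLIENT.scan(
#           TableName=FILES_TABLE_NAME,
#           FilterExpression=(
#               'uploaded_at <= :expiry_at'
#               ' AND attribute_not_exists(stored_at)'
#               ' AND attribute_not_exists(deleted_at)'),
#           ExpressionAttributeValues={
#               ':expiry_at': {'S': expiry_at.isoformat()}})
#       return response['Items']
#
# ISO-8601 strings compare lexicographically in timestamp order, which is
# why a string comparison against uploaded_at works; a low-level scan also
# returns items in the {'S': ...} shape that expire() reads above.
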
def uploaded(event, context):
    """S3 event triggers when file is uploaded

    event: https://docs.aws.amazon.com/AmazonS3/latest/dev/notification-content-structure.html
    """
    dt = datetime.utcnow()

    # NOTE: the event might include multiple records
    for r in event['Records']:
        file_id = r['s3']['object']['key']
        file = {
            'id': file_id,
            'size': r['s3']['object']['size'],
            'type': get_s3_file_type(file_id),
            'uploaded_at': dt,
        }
        if runtime_context.STORE:
            file['stored_at'] = dt
        FileModel.update(file)
        LOGGER.debug(
            'Files item updated (uploaded). service=ddb method=update_item id={}'
            .format(file_id))

    return {"statusCode": 200}
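
# Shape of the S3 notification event consumed above (abridged from the AWS
# docs linked in the docstring); only the fields this handler reads are
# shown:
#
#   {
#       "Records": [
#           {
#               "s3": {
#                   "bucket": {"name": "<bucket-name>"},
#                   "object": {"key": "<file-id>", "size": 1024}
#               }
#           }
#       ]
#   }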