def consumer_sqs_delete_duration(request):
    """Return the average consumer SQS delete duration over the last 8 hours.

    Queries CloudWatch for the PROCESSED metric on the consumer's delete
    dimension, scoped to this deployment stack's namespace.
    """
    namespace = util.get_cloudwatch_namespace(os.environ[c.ENV_DEPLOYMENT_STACK_ARN])
    window_start = datetime.datetime.today() - datetime.timedelta(hours=8)
    client = CloudWatch()
    return client.get_metric(
        namespace,
        c.CW_METRIC_NAME_PROCESSED,
        c.CW_METRIC_DIMENSION_NAME_CONSUMER,
        c.CW_METRIC_DIMENSION_DEL,
        "Average",
        start=window_start)
def main(event, lambdacontext): starttime = time.time() queue_url = event.get(c.KEY_SQS_QUEUE_URL, None) print "Started consumer with queue url '{}'".format(queue_url) context = event.get("context", {}) context[c.KEY_SQS_QUEUE_URL] = queue_url context[c.KEY_LAMBDA_FUNCTION] = lambdacontext.function_name if hasattr(lambdacontext, 'function_name') else None context[c.KEY_REQUEST_ID] = lambdacontext.aws_request_id if hasattr(lambdacontext, 'aws_request_id') else None context[c.KEY_IS_LAMBDA_ENV] = context[c.KEY_REQUEST_ID] is not None prefix = util.get_stack_name_from_arn(os.environ[c.ENV_DEPLOYMENT_STACK_ARN]) context[c.KEY_STACK_PREFIX] = prefix context[c.KEY_SQS] = Sqs(context, "{0}_".format(prefix)) context[c.KEY_SQS_AMOEBA] = Sqs(context, "{0}{1}_".format(prefix, c.KEY_SQS_AMOEBA_SUFFIX)) context[c.KEY_SQS_AMOEBA].set_queue_url(lowest_load_queue=True) context[c.KEY_LAMBDA] = Lambda(context) context[c.KEY_CLOUDWATCH] = CloudWatch(context) context[c.KEY_THREAD_POOL] = ThreadPool(context, 8) context[c.KEY_METRIC_BUCKET] = os.environ[c.RES_S3_STORAGE] context[c.KEY_START_TIME] = starttime context[c.CW_ATTR_SAVE_DURATION] = context[c.KEY_CLOUDWATCH].avg_save_duration(util.get_cloudwatch_namespace(os.environ[c.ENV_DEPLOYMENT_STACK_ARN])) context[c.CW_ATTR_DELETE_DURATION] = context[c.KEY_CLOUDWATCH].avg_delete_duration(util.get_cloudwatch_namespace(os.environ[c.ENV_DEPLOYMENT_STACK_ARN])) context[c.KEY_SUCCEEDED_MSG_IDS] = [] process(context) del context gc.collect() return { 'StatusCode': 200 }
def cloudwatch_metric(request, namespace, metric_name, dimension_name, dimension_value, aggregation_type, time_delta_hours=8, period_in_seconds=300):
    """Fetch an AWS-namespace CloudWatch metric for a stack resource.

    `dimension_value` must be the name of an environment variable whose value
    is the resource's physical id (i.e. the logical resource name, e.g.
    FIFOConsumer); a ClientError is raised otherwise.

    The metric is aggregated with `aggregation_type` over the last
    `time_delta_hours` hours at `period_in_seconds` granularity.
    """
    if dimension_value not in os.environ:
        raise ClientError(
            "The dimension value '{}' is not one of the environment variables. It should be the logical name of a resource. Example: FIFOConsumer."
            .format(dimension_value))
    window_start = datetime.datetime.today() - datetime.timedelta(hours=time_delta_hours)
    client = CloudWatch()
    return client.get_metric(
        "AWS/" + namespace,
        metric_name,
        dimension_name,
        os.environ[dimension_value],
        aggregation_type,
        start=window_start,
        period=period_in_seconds)
def sum_delete_duration(request):
    """Return the summed delete-duration metric for this deployment stack."""
    stack_arn = os.environ[c.ENV_DEPLOYMENT_STACK_ARN]
    return CloudWatch().sum_delete_duration(util.get_cloudwatch_namespace(stack_arn))
import boto3
import cv2

# Enable debug to print all data
DEBUG = False
# When enabled, every event is logged to CloudWatch
ENABLE_CLOUDWATCH = True

# CloudWatch initialization -- import is deferred so the dependency is only
# required when CloudWatch logging is actually turned on.
if ENABLE_CLOUDWATCH:
    from cloudwatch import CloudWatch
    cloudwatch = CloudWatch()


class Rekognition(object):
    """AWS face recognition helper backed by the Rekognition service.

    Args:
        img (str): Path of the source image to search against the collection.
    """

    def __init__(self, img):
        # boto3 clients for the Rekognition service and for the S3 bucket
        # that holds the indexed face images.
        self.client = boto3.client('rekognition')
        self.s3 = boto3.client('s3')
        self.bucket = 'cto-faces-index'      # S3 bucket of indexed faces
        self.collection_id = 'cto-cnx-team'  # Rekognition face collection id
        self.threshhold = 95                 # match confidence threshold (percent)
        self.max_faces = 5                   # max matches returned per search
        self.source_img = img

    def upload_image(self):
from task_worker import TaskWorker
from datastore import Datastore
from cloud_storage import CloudStorage
from cloudwatch import CloudWatch
import threading
import pika

# Datastore entity kind and AMQP queue name used by the job workers.
ENTITY_KIND = 'job'
QUEUE = 'queue'
AMQP_URL = ''  # NOTE(review): empty here -- presumably set elsewhere or at deploy time; confirm

# Module-level service singletons shared by all callbacks.
DATASTORE = Datastore()
STORAGE = CloudStorage()
CLOUDWATCH = CloudWatch()


def publish_queue_length(channel):
    """Publish the current queue depth to CloudWatch, then re-schedule itself.

    The timer is re-armed in the finally block, so sampling continues every
    5 seconds even if the declare/publish raises.
    """
    try:
        count = channel.queue_declare(queue=QUEUE, durable=True).method.message_count
        CLOUDWATCH.publish_queue_length(count)
    finally:
        threading.Timer(5, publish_queue_length, [channel]).start()


def callback(ch, method, properties, body):
    # AMQP message handler: the message body is the datastore id of a job
    # entity (text, converted with Python 2's long()); load the entity and
    # run a TaskWorker on it.
    try:
        entity = DATASTORE.get(ENTITY_KIND, long(body))
        worker = TaskWorker(DATASTORE, STORAGE, entity)
        worker.start()
    finally:
if ENABLE_AWS_FACIAL_REKOGNITION: from aws_rekognition import Rekognition # Load facial detection network if ENABLE_CAFFE_NET: caffe_net = cv2.dnn.readNetFromCaffe( 'bin/deploy.prototxt.txt', 'bin/res10_300x300_ssd_iter_140000.caffemodel') else: face_cascade = cv2.CascadeClassifier( "bin/haarcascade_frontalface_default.xml") # Cloudwatch initialization camera and algorithm settings log if ENABLE_CLOUDWATCH: from cloudwatch import CloudWatch cloudwatch = CloudWatch() cloudwatch.logging(group='facial-rekognition', channel='facial-rekognition', level='notice', message='Initialization Camera And Algorithm Settings', context={ 'camera_device_id': CAMERA_DEVICE_ID, 'source_capture_info': { 'width': FRAME_WIDTH, 'height': FRAME_HEIGHT }, 'algorithm_setting': { 'enable_darkflow': ENABLE_DARKFLOW, 'enable_caffe_net': ENABLE_CAFFE_NET, 'enable_aws_facial_rekognition': ENABLE_AWS_FACIAL_REKOGNITION,