Example #1
0
 def _queue_url(params, resources, resource_id, **kwargs):
     """Resolve the SQS queue URL for the given resource.

     Prefers the physical resource ID or an explicit "QueueUrl" property;
     otherwise derives the URL from the "QueueArn" property.
     """
     props = cls(resources[resource_id]).props
     url = props.get("QueueUrl")
     direct = cls(resources[resource_id]).physical_resource_id or url
     if not direct:
         # fall back to deriving the URL from the queue's ARN
         return aws_stack.sqs_queue_url_for_arn(props["QueueArn"])
     return direct
Example #2
0
def send_notification_for_subscriber(notif, bucket_name, object_path, version_id, api_method, action, event_name):
    """Send an S3 bucket notification for `notif` to its configured destination(s).

    Builds the S3 event message and dispatches it to the SQS queue, SNS topic,
    and/or Lambda function referenced by the notification config. Returns early
    (silently) when the notification's event type or filter rules do not match
    the current action/object. Delivery failures are logged, not raised.

    :param notif: notification configuration dict (keys like 'Event', 'Filter',
        'Queue', 'Topic', 'CloudFunction'/'LambdaFunction')
    :param bucket_name: name of the S3 bucket (normalized internally)
    :param object_path: path of the affected S3 object (may be URL-encoded)
    :param version_id: object version ID, if versioning is enabled
    :param api_method: API method that triggered the event (e.g., 'PUT')
    :param action: action name used for event type matching
    :param event_name: event name to embed in the notification message
    """
    bucket_name = normalize_bucket_name(bucket_name)

    if not event_type_matches(notif['Event'], action, api_method) or \
            not filter_rules_match(notif.get('Filter'), object_path):
        return

    # normalize the key: collapse double slashes, URL-decode, strip leading '/'
    key = urlparse.unquote(object_path.replace('//', '/'))[1:]

    s3_client = aws_stack.connect_to_service('s3')
    try:
        object_size = s3_client.head_object(Bucket=bucket_name, Key=key).get('ContentLength', 0)
    except botocore.exceptions.ClientError:
        # object may already be gone (e.g., delete events) - report size 0
        object_size = 0

    # build event message
    message = get_event_message(
        event_name=event_name,
        bucket_name=bucket_name,
        file_name=key,
        file_size=object_size,
        version_id=version_id
    )
    message = json.dumps(message)

    if notif.get('Queue'):
        sqs_client = aws_stack.connect_to_service('sqs')
        try:
            queue_url = aws_stack.sqs_queue_url_for_arn(notif['Queue'])
            sqs_client.send_message(QueueUrl=queue_url, MessageBody=message)
        except Exception as e:
            LOGGER.warning('Unable to send notification for S3 bucket "%s" to SQS queue "%s": %s' %
                (bucket_name, notif['Queue'], e))
    if notif.get('Topic'):
        sns_client = aws_stack.connect_to_service('sns')
        try:
            sns_client.publish(TopicArn=notif['Topic'], Message=message, Subject='Amazon S3 Notification')
        except Exception:
            LOGGER.warning('Unable to send notification for S3 bucket "%s" to SNS topic "%s".' %
                (bucket_name, notif['Topic']))
    # CloudFunction and LambdaFunction are semantically identical
    lambda_function_config = notif.get('CloudFunction') or notif.get('LambdaFunction')
    if lambda_function_config:
        # make sure we don't run into a socket timeout
        connection_config = botocore.config.Config(read_timeout=300)
        lambda_client = aws_stack.connect_to_service('lambda', config=connection_config)
        try:
            lambda_client.invoke(FunctionName=lambda_function_config,
                                 InvocationType='Event', Payload=message)
        except Exception:
            LOGGER.warning('Unable to send notification for S3 bucket "%s" to Lambda function "%s".' %
                (bucket_name, lambda_function_config))

    # BUG FIX: in Python 3, filter() returns a lazy iterator that is always
    # truthy, so the original `if not filter(...)` check could never trigger;
    # use any() to actually detect the "no destination configured" case.
    if not any(notif.get(x) for x in NOTIFICATION_DESTINATION_TYPES):
        LOGGER.warning('Neither of %s defined for S3 notification.' %
            '/'.join(NOTIFICATION_DESTINATION_TYPES))
    def _listener_loop(self, *args):
        """Poll all SQS event sources in a loop and forward messages to Lambda.

        Runs until no event sources remain, in which case the listener thread
        is deregistered and the loop exits; it restarts when a new message or
        event source mapping arrives. Messages that fail processing are kept
        in `unprocessed_messages` and retried on the next iteration instead of
        being re-fetched from the queue.
        """
        while True:
            try:
                sources = self.get_matching_event_sources()
                if not sources:
                    # Temporarily disable polling if no event sources are configured
                    # anymore. The loop will get restarted next time a message
                    # arrives and if an event source is configured.
                    self.SQS_LISTENER_THREAD.pop("_thread_")
                    return

                unprocessed_messages = {}

                for source in sources:
                    queue_arn = source["EventSourceArn"]
                    # region is the 4th colon-separated field of the ARN
                    region_name = queue_arn.split(":")[3]
                    sqs_client = aws_stack.connect_to_service(
                        "sqs", region_name=region_name)
                    # clamp the configured batch size into SQS's valid 1..10 range
                    batch_size = max(min(source.get("BatchSize", 1), 10), 1)

                    try:
                        queue_url = aws_stack.sqs_queue_url_for_arn(queue_arn)
                        # retry messages left over from a failed previous attempt
                        # before fetching new ones
                        messages = unprocessed_messages.pop(queue_arn, None)
                        if not messages:
                            result = sqs_client.receive_message(
                                QueueUrl=queue_url,
                                AttributeNames=["All"],
                                MessageAttributeNames=["All"],
                                MaxNumberOfMessages=batch_size,
                            )
                            messages = result.get("Messages")
                            if not messages:
                                continue

                        res = self._process_messages_for_event_source(
                            source, messages)
                        if not res:
                            # processing failed - keep the batch for the next iteration
                            unprocessed_messages[queue_arn] = messages

                    except Exception as e:
                        if "NonExistentQueue" not in str(e):
                            # TODO: remove event source if queue does no longer exist?
                            LOG.debug(
                                "Unable to poll SQS messages for queue %s: %s",
                                queue_arn, e)

            except Exception as e:
                # FIX: don't silently swallow unexpected errors - the loop
                # should stay alive (best-effort), but errors must be visible
                LOG.debug("Error in SQS event source listener loop: %s", e)
            finally:
                time.sleep(self.SQS_POLL_INTERVAL_SEC)
 def _process_messages_for_event_source(self, source, messages):
     """Forward a batch of SQS messages from `source` to its mapped Lambda.

     Returns whatever `_send_event_to_lambda` returns (truthy on success).
     """
     function_arn = source["FunctionArn"]
     source_arn = source["EventSourceArn"]
     # region is the 4th colon-separated field of the ARN
     region = source_arn.split(":")[3]
     url = aws_stack.sqs_queue_url_for_arn(source_arn)
     LOG.debug("Sending event from event source %s to Lambda %s", source_arn,
               function_arn)
     return self._send_event_to_lambda(
         source_arn,
         url,
         function_arn,
         messages,
         region=region,
     )