Example #1
    def test_put_subscription_filter_firehose(self, logs_client,
                                              logs_log_group, logs_log_stream,
                                              s3_bucket, s3_client,
                                              firehose_client):
        firehose_name = f"test-firehose-{short_uid()}"
        s3_bucket_arn = f"arn:aws:s3:::{s3_bucket}"

        response = firehose_client.create_delivery_stream(
            DeliveryStreamName=firehose_name,
            S3DestinationConfiguration={
                "BucketARN": s3_bucket_arn,
                "RoleARN": "arn:aws:iam::000000000000:role/FirehoseToS3Role",
            },
        )
        firehose_arn = response["DeliveryStreamARN"]

        logs_client.put_subscription_filter(
            logGroupName=logs_log_group,
            filterName="Destination",
            filterPattern="",
            destinationArn=firehose_arn,
        )

        logs_client.put_log_events(
            logGroupName=logs_log_group,
            logStreamName=logs_log_stream,
            logEvents=[
                {
                    "timestamp": now_utc(millis=True),
                    "message": "test"
                },
                {
                    "timestamp": now_utc(millis=True),
                    "message": "test 2"
                },
            ],
        )

        logs_client.put_log_events(
            logGroupName=logs_log_group,
            logStreamName=logs_log_stream,
            logEvents=[
                {
                    "timestamp": now_utc(millis=True),
                    "message": "test"
                },
                {
                    "timestamp": now_utc(millis=True),
                    "message": "test 2"
                },
            ],
        )

        response = s3_client.list_objects(Bucket=s3_bucket)
        assert len(response["Contents"]) == 2

        # clean up
        firehose_client.delete_delivery_stream(
            DeliveryStreamName=firehose_name, AllowForceDelete=True)
Example #2
 def put_event():
     self.logs_client.put_log_events(
         logGroupName=log_group,
         logStreamName=log_stream,
         logEvents=[
             {"timestamp": now_utc(millis=True), "message": "test"},
             {"timestamp": now_utc(millis=True), "message": "test 2"},
         ],
     )
Example #3
        def wrapped(*args, **kwargs):
            from localstack.utils.common import now_utc

            start_time = now_utc(millis=True)
            try:
                return f(*args, **kwargs)
            finally:
                end_time = now_utc(millis=True)
                func_name = name or f.__name__
                duration = end_time - start_time
                if duration > 500:
                    LOG.info('Execution of "%s" took %.2fms', func_name, duration)
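The snippet above is the inner function of a timing-decorator factory. For context, a self-contained sketch of the same pattern follows; the factory name log_duration, its parameters, and the use of time.time() in place of now_utc() are assumptions for illustration, not LocalStack's actual API.

import functools
import logging
import time

LOG = logging.getLogger(__name__)

def log_duration(name=None, min_ms=500):
    # Hypothetical decorator factory: log any call of the wrapped function
    # that takes longer than min_ms milliseconds.
    def decorator(f):
        @functools.wraps(f)
        def wrapped(*args, **kwargs):
            start_time = time.time() * 1000  # stand-in for now_utc(millis=True)
            try:
                return f(*args, **kwargs)
            finally:
                duration = time.time() * 1000 - start_time
                func_name = name or f.__name__
                if duration > min_ms:
                    LOG.info('Execution of "%s" took %.2fms', func_name, duration)
        return wrapped
    return decorator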
Example #4
 def wrapped(*args, **kwargs):
     time_before = now_utc()
     result = None
     try:
         result = func(*args, **kwargs)
         publish_result(ns, time_before, result, kwargs)
     except Exception as e:
         publish_error(ns, time_before, e, kwargs)
         raise e
     finally:
         time_after = now_utc()  # note: computed but never used
     return result
Example #5
    def test_put_subscription_filter_firehose(self):
        log_group = "lg-%s" % short_uid()
        log_stream = "ls-%s" % short_uid()
        s3_bucket = "s3-%s" % short_uid()
        s3_bucket_arn = "arn:aws:s3:::{}".format(s3_bucket)
        firehose = "firehose-%s" % short_uid()

        s3_client = aws_stack.connect_to_service("s3")
        firehose_client = aws_stack.connect_to_service("firehose")

        s3_client.create_bucket(Bucket=s3_bucket)
        response = firehose_client.create_delivery_stream(
            DeliveryStreamName=firehose,
            S3DestinationConfiguration={
                "BucketARN": s3_bucket_arn,
                "RoleARN": "arn:aws:iam::000000000000:role/FirehosetoS3Role",
            },
        )
        firehose_arn = response["DeliveryStreamARN"]

        self.create_log_group_and_stream(log_group, log_stream)

        self.logs_client.put_subscription_filter(
            logGroupName=log_group,
            filterName="Destination",
            filterPattern="",
            destinationArn=firehose_arn,
        )

        self.logs_client.put_log_events(
            logGroupName=log_group,
            logStreamName=log_stream,
            logEvents=[
                {"timestamp": now_utc(millis=True), "message": "test"},
                {"timestamp": now_utc(millis=True), "message": "test 2"},
            ],
        )

        self.logs_client.put_log_events(
            logGroupName=log_group,
            logStreamName=log_stream,
            logEvents=[
                {"timestamp": now_utc(millis=True), "message": "test"},
                {"timestamp": now_utc(millis=True), "message": "test 2"},
            ],
        )

        response = s3_client.list_objects(Bucket=s3_bucket)
        self.assertEqual(2, len(response["Contents"]))

        # clean up
        self.cleanup(log_group, log_stream)
        firehose_client.delete_delivery_stream(DeliveryStreamName=firehose, AllowForceDelete=True)
Example #6
    def test_put_subscription_filter_lambda(self, lambda_client, logs_client,
                                            create_lambda_function):
        test_lambda_name = f"test-lambda-function-{short_uid()}"
        # TODO add as fixture
        create_lambda_function(
            handler_file=TEST_LAMBDA_PYTHON3,
            libs=TEST_LAMBDA_LIBS,
            func_name=test_lambda_name,
            runtime=LAMBDA_RUNTIME_PYTHON36,
        )

        lambda_client.invoke(FunctionName=test_lambda_name, Payload=b"{}")

        log_group_name = f"/aws/lambda/{test_lambda_name}"

        logs_client.put_subscription_filter(
            logGroupName=log_group_name,
            filterName="test",
            filterPattern="",
            destinationArn=func_arn(test_lambda_name),
        )
        log_stream_name = f"test-log-stream-{short_uid()}"
        logs_client.create_log_stream(logGroupName=log_group_name,
                                      logStreamName=log_stream_name)

        logs_client.put_log_events(
            logGroupName=log_group_name,
            logStreamName=log_stream_name,
            logEvents=[
                {
                    "timestamp": now_utc(millis=True),
                    "message": "test"
                },
                {
                    "timestamp": now_utc(millis=True),
                    "message": "test 2"
                },
            ],
        )

        response = logs_client.describe_subscription_filters(
            logGroupName=log_group_name)
        assert len(response["subscriptionFilters"]) == 1

        def check_invocation():
            events = testutil.get_lambda_log_events(test_lambda_name)
            assert len(events) == 2

        retry(check_invocation, retries=6, sleep=3.0)
Example #7
def load_plugins(scope=None):
    scope = scope or PLUGIN_SCOPE_SERVICES
    if PLUGINS_LOADED.get(scope):
        return PLUGINS_LOADED[scope]

    t1 = now_utc()
    is_infra_process = (
        os.environ.get(constants.LOCALSTACK_INFRA_PROCESS) in ["1", "true"] or "--host" in sys.argv
    )
    log_level = logging.WARNING if scope == PLUGIN_SCOPE_COMMANDS and not is_infra_process else None
    setup_logging(log_level=log_level)

    loaded_files = []
    result = []

    # Use a predefined list of plugin modules for now, to speed up the plugin loading at startup
    # search_modules = pkgutil.iter_modules()
    search_modules = PLUGIN_MODULES

    for module in search_modules:
        if not should_load_module(module, scope):
            continue
        file_path = None
        if isinstance(module, six.string_types):
            loader = pkgutil.get_loader(module)
            if loader:
                path = getattr(loader, "path", "") or getattr(loader, "filename", "")
                if "__init__.py" in path:
                    path = os.path.dirname(path)
                file_path = os.path.join(path, "plugins.py")
        elif six.PY3 and not isinstance(module, tuple):
            file_path = os.path.join(module.module_finder.path, module.name, "plugins.py")
        elif six.PY3 or isinstance(module[0], pkgutil.ImpImporter):
            if hasattr(module[0], "path"):
                file_path = os.path.join(module[0].path, module[1], "plugins.py")
        if file_path and file_path not in loaded_files:
            plugin_config = load_plugin_from_path(file_path, scope=scope)
            if plugin_config:
                result.append(plugin_config)
            loaded_files.append(file_path)
    # set global flag
    PLUGINS_LOADED[scope] = result

    # debug plugin loading time
    load_time = now_utc() - t1
    if load_time > 5:
        LOG.debug("Plugin loading took %s sec", load_time)

    return result
Example #8
def publish_event(time_before, result, kwargs):
    event_publisher.fire_event(event_publisher.EVENT_LAMBDA_INVOKE_FUNC,
                               payload={
                                   'f': _func_name(kwargs),
                                   'd': now_utc() - time_before,
                                   'r': result[0]
                               })
Example #9
def forward_to_fallback_url(func_arn, data):
    """ If LAMBDA_FALLBACK_URL is configured, forward the invocation of this non-existing
        Lambda to the configured URL. """
    if not config.LAMBDA_FALLBACK_URL:
        return None
    if config.LAMBDA_FALLBACK_URL.startswith('dynamodb://'):
        table_name = urlparse(config.LAMBDA_FALLBACK_URL.replace('dynamodb://', 'http://')).netloc
        dynamodb = aws_stack.connect_to_service('dynamodb')
        item = {
            'id': {'S': short_uid()},
            'timestamp': {'N': str(now_utc())},
            'payload': {'S': str(data)}
        }
        aws_stack.create_dynamodb_table(table_name, partition_key='id')
        dynamodb.put_item(TableName=table_name, Item=item)
        return ''
    if re.match(r'^https?://.+', config.LAMBDA_FALLBACK_URL):
        response = safe_requests.post(config.LAMBDA_FALLBACK_URL, data)
        return response.content
    raise ClientError('Unexpected value for LAMBDA_FALLBACK_URL: %s' %
                      config.LAMBDA_FALLBACK_URL)
Example #10
    def test_put_events_multi_bytes_msg(self):
        group = "g-%s" % short_uid()
        stream = "s-%s" % short_uid()

        groups_before = testutil.list_all_resources(
            lambda kwargs: self.logs_client.describe_log_groups(**kwargs),
            last_token_attr_name="nextToken",
            list_attr_name="logGroups",
        )

        self.create_log_group_and_stream(group, stream)

        groups_after = testutil.list_all_resources(
            lambda kwargs: self.logs_client.describe_log_groups(**kwargs),
            last_token_attr_name="nextToken",
            list_attr_name="logGroups",
        )

        self.assertEqual(len(groups_before) + 1, len(groups_after))

        # send message with non-ASCII (multi-byte) chars
        body_msg = "🙀 - 参よ - 日本語"
        events = [{"timestamp": now_utc(millis=True), "message": body_msg}]
        response = self.logs_client.put_log_events(
            logGroupName=group, logStreamName=stream, logEvents=events
        )
        self.assertEqual(200, response["ResponseMetadata"]["HTTPStatusCode"])

        events = self.logs_client.get_log_events(logGroupName=group, logStreamName=stream)["events"]
        self.assertEqual(body_msg, events[0]["message"])

        # clean up
        self.logs_client.delete_log_group(logGroupName=group)
Example #11
def add_missing_record_attributes(records: List[Dict]):
    def _get_entry(obj, key):
        return obj.get(key) or obj.get(first_char_to_lower(key))

    for record in records:
        if not _get_entry(record, "ApproximateArrivalTimestamp"):
            record["ApproximateArrivalTimestamp"] = int(now_utc(millis=True))
        if not _get_entry(record, "KinesisRecordMetadata"):
            record["kinesisRecordMetadata"] = {
                "shardId":
                "shardId-000000000000",
                # not really documented what AWS is using internally - simply using a random UUID here
                "partitionKey":
                str(uuid.uuid4()),
                "approximateArrivalTimestamp":
                timestamp(
                    float(_get_entry(record, "ApproximateArrivalTimestamp")) /
                    1000,
                    format=TIMESTAMP_FORMAT_MICROS,
                ),
                "sequenceNumber":
                next_sequence_number(),
                "subsequenceNumber":
                "",
            }
Example #12
    def forward_request(self, method, path, data, headers):
        global STREAM_CONSUMERS
        data = self.decode_content(data or '{}')
        action = headers.get('X-Amz-Target', '').split('.')[-1]

        if action == 'RegisterStreamConsumer':
            consumer = clone(data)
            consumer['ConsumerStatus'] = 'ACTIVE'
            consumer['ConsumerARN'] = '%s/consumer/%s' % (data['StreamARN'],
                                                          data['ConsumerName'])
            consumer['ConsumerCreationTimestamp'] = float(now_utc())
            consumer = json_safe(consumer)
            STREAM_CONSUMERS.append(consumer)
            return {'Consumer': consumer}
        elif action == 'DeregisterStreamConsumer':

            def consumer_matches(c):
                stream_arn = data.get('StreamARN')
                cons_name = data.get('ConsumerName')
                cons_arn = data.get('ConsumerARN')
                return (c.get('ConsumerARN') == cons_arn
                        or (c.get('StreamARN') == stream_arn
                            and c.get('ConsumerName') == cons_name))

            STREAM_CONSUMERS = [
                c for c in STREAM_CONSUMERS if not consumer_matches(c)
            ]
            return {}
        elif action == 'ListStreamConsumers':
            result = {
                'Consumers': [
                    c for c in STREAM_CONSUMERS
                    if c.get('StreamARN') == data.get('StreamARN')
                ]
            }
            return result
        elif action == 'DescribeStreamConsumer':
            consumer_arn = data.get('ConsumerARN') or data['ConsumerName']
            consumer_name = data.get('ConsumerName') or data['ConsumerARN']
            creation_timestamp = data.get('ConsumerCreationTimestamp')
            result = {
                'ConsumerDescription': {
                    'ConsumerARN': consumer_arn,
                    'ConsumerCreationTimestamp': creation_timestamp,
                    'ConsumerName': consumer_name,
                    'ConsumerStatus': 'ACTIVE',
                    'StreamARN': data.get('StreamARN')
                }
            }
            return result
        elif action == 'SubscribeToShard':
            result = subscribe_to_shard(data)
            return result

        if random.random() < config.KINESIS_ERROR_PROBABILITY:
            if action in ['PutRecord', 'PutRecords']:
                return kinesis_error_response(data, action)
        return True
Example #13
def add_client_certificate(path, data):
    region_details = APIGatewayRegion.get()
    result = common.clone(data)
    result["clientCertificateId"] = cert_id = common.short_uid()
    result["createdDate"] = common.now_utc()
    result["expirationDate"] = result["createdDate"] + 60 * 60 * 24 * 30  # assume 30 days validity
    result["pemEncodedCertificate"] = "testcert-123"  # TODO return proper certificate!
    region_details.client_certificates[cert_id] = result
    return make_json_response(to_client_cert_response_json(result))
Example #14
def publish_event(time_before, result, kwargs):
    event_publisher.fire_event(
        event_publisher.EVENT_LAMBDA_INVOKE_FUNC,
        payload={
            "f": event_publisher.get_hash(_func_name(kwargs)),
            "d": now_utc() - time_before,
            "r": result[0],
        },
    )
Example #15
    def test_filter_log_events_response_header(self):
        group = "lg-%s" % short_uid()
        stream = "ls-%s" % short_uid()

        self.create_log_group_and_stream(group, stream)

        events = [
            {"timestamp": now_utc(millis=True), "message": "log message 1"},
            {"timestamp": now_utc(millis=True), "message": "log message 2"},
        ]
        self.logs_client.put_log_events(logGroupName=group, logStreamName=stream, logEvents=events)

        rs = self.logs_client.filter_log_events(logGroupName=group)
        self.assertEqual(200, rs["ResponseMetadata"]["HTTPStatusCode"])
        self.assertEqual(
            APPLICATION_AMZ_JSON_1_1,
            rs["ResponseMetadata"]["HTTPHeaders"]["content-type"],
        )

        # clean up
        self.logs_client.delete_log_group(logGroupName=group)
Example #16
    def test_put_subscription_filter_lambda(self):
        lambda_client = aws_stack.connect_to_service("lambda")

        testutil.create_lambda_function(
            handler_file=TEST_LAMBDA_PYTHON3,
            libs=TEST_LAMBDA_LIBS,
            func_name=TEST_LAMBDA_NAME_PY3,
            runtime=LAMBDA_RUNTIME_PYTHON36,
        )

        lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_PY3, Payload=b"{}")

        log_group_name = "/aws/lambda/{}".format(TEST_LAMBDA_NAME_PY3)

        self.logs_client.put_subscription_filter(
            logGroupName=log_group_name,
            filterName="test",
            filterPattern="",
            destinationArn=func_arn(TEST_LAMBDA_NAME_PY3),
        )
        stream = "ls-%s" % short_uid()
        self.logs_client.create_log_stream(logGroupName=log_group_name, logStreamName=stream)

        self.logs_client.put_log_events(
            logGroupName=log_group_name,
            logStreamName=stream,
            logEvents=[
                {"timestamp": now_utc(millis=True), "message": "test"},
                {"timestamp": now_utc(millis=True), "message": "test 2"},
            ],
        )

        resp2 = self.logs_client.describe_subscription_filters(logGroupName=log_group_name)
        self.assertEqual(1, len(resp2["subscriptionFilters"]))

        def check_invocation():
            events = testutil.get_lambda_log_events(TEST_LAMBDA_NAME_PY3)
            self.assertEqual(2, len(events))

        retry(check_invocation, retries=6, sleep=3.0)
Example #17
 def wrapped(*args, **kwargs):
     time_before = now_utc()
     try:
         result = func(*args, **kwargs)
         publish_result(ns, time_before, result, kwargs)
     except Exception as e:
         publish_error(ns, time_before, e, kwargs)
         raise e
     finally:
         # TODO
         # time_after = now_utc()
         pass
     return result
Example #18
    def test_filter_log_events_response_header(self, logs_client,
                                               logs_log_group,
                                               logs_log_stream):
        events = [
            {
                "timestamp": now_utc(millis=True),
                "message": "log message 1"
            },
            {
                "timestamp": now_utc(millis=True),
                "message": "log message 2"
            },
        ]
        response = logs_client.put_log_events(logGroupName=logs_log_group,
                                              logStreamName=logs_log_stream,
                                              logEvents=events)
        assert response["ResponseMetadata"]["HTTPStatusCode"] == 200

        response = logs_client.filter_log_events(logGroupName=logs_log_group)
        assert response["ResponseMetadata"]["HTTPStatusCode"] == 200
        assert (response["ResponseMetadata"]["HTTPHeaders"]["content-type"] ==
                APPLICATION_AMZ_JSON_1_1)
Example #19
    def test_put_events_multi_bytes_msg(self, logs_client, logs_log_group,
                                        logs_log_stream):
        body_msg = "🙀 - 参よ - 日本語"
        events = [{"timestamp": now_utc(millis=True), "message": body_msg}]
        response = logs_client.put_log_events(logGroupName=logs_log_group,
                                              logStreamName=logs_log_stream,
                                              logEvents=events)
        assert response["ResponseMetadata"]["HTTPStatusCode"] == 200

        events = logs_client.get_log_events(
            logGroupName=logs_log_group,
            logStreamName=logs_log_stream)["events"]
        assert events[0]["message"] == body_msg
Example #20
    def forward_request(self, method, path, data, headers):
        global STREAM_CONSUMERS
        data = json.loads(to_str(data or '{}'))
        action = headers.get('X-Amz-Target')

        if action == '%s.RegisterStreamConsumer' % ACTION_PREFIX:
            consumer = clone(data)
            consumer['ConsumerStatus'] = 'ACTIVE'
            consumer['ConsumerARN'] = '%s/consumer/%s' % (data['StreamARN'], data['ConsumerName'])
            consumer['ConsumerCreationTimestamp'] = float(now_utc())
            consumer = json_safe(consumer)
            STREAM_CONSUMERS.append(consumer)
            return {'Consumer': consumer}
        elif action == '%s.DeregisterStreamConsumer' % ACTION_PREFIX:
            def consumer_matches(c):
                stream_arn = data.get('StreamARN')
                cons_name = data.get('ConsumerName')
                cons_arn = data.get('ConsumerARN')
                return (c.get('ConsumerARN') == cons_arn or
                    (c.get('StreamARN') == stream_arn and c.get('ConsumerName') == cons_name))
            STREAM_CONSUMERS = [c for c in STREAM_CONSUMERS if not consumer_matches(c)]
            return {}
        elif action == '%s.ListStreamConsumers' % ACTION_PREFIX:
            result = {
                'Consumers': [c for c in STREAM_CONSUMERS if c.get('StreamARN') == data.get('StreamARN')]
            }
            return result
        elif action == '%s.DescribeStreamConsumer' % ACTION_PREFIX:
            consumer_arn = data.get('ConsumerARN') or data['ConsumerName']
            consumer_name = data.get('ConsumerName') or data['ConsumerARN']
            creation_timestamp = data.get('ConsumerCreationTimestamp')
            result = {
                'ConsumerDescription': {
                    'ConsumerARN': consumer_arn,
                    'ConsumerCreationTimestamp': creation_timestamp,
                    'ConsumerName': consumer_name,
                    'ConsumerStatus': 'ACTIVE',
                    'StreamARN': data.get('StreamARN')
                }
            }
            return result

        if random.random() < config.KINESIS_ERROR_PROBABILITY:
            action = headers.get('X-Amz-Target')
            if action in [ACTION_PUT_RECORD, ACTION_PUT_RECORDS]:
                return kinesis_error_response(data, action)
        return True
Example #21
    def forward_request(self, method, path, data, headers):
        global STREAM_CONSUMERS
        data, encoding_type = self.decode_content(data or '{}', True)
        action = headers.get('X-Amz-Target', '').split('.')[-1]
        if action == 'RegisterStreamConsumer':
            stream_arn = data.get('StreamARN', '').strip('" ')
            cons_arn = data.get('ConsumerARN', '').strip('" ')
            cons_name = data.get('ConsumerName', '').strip('" ')
            prev_consumer = find_consumer(cons_arn, cons_name, stream_arn)

            if prev_consumer:
                msg = 'Consumer %s already exists' % prev_consumer.get(
                    'ConsumerARN')
                return simple_error_response(msg, 400, 'ResourceAlreadyExists',
                                             encoding_type)

            consumer = clone(data)
            consumer['ConsumerStatus'] = 'ACTIVE'
            consumer['ConsumerARN'] = '%s/consumer/%s' % (stream_arn,
                                                          cons_name)
            consumer['ConsumerCreationTimestamp'] = now_utc()
            consumer = json_safe(consumer)
            STREAM_CONSUMERS.append(consumer)

            result = {'Consumer': consumer}

            return encoded_response(result, encoding_type)

        elif action == 'DeregisterStreamConsumer':

            def consumer_matches(c):
                stream_arn = data.get('StreamARN', '').strip('" ')
                cons_name = data.get('ConsumerName', '').strip('" ')
                cons_arn = data.get('ConsumerARN', '').strip('" ')
                return (c.get('ConsumerARN') == cons_arn
                        or (c.get('StreamARN') == stream_arn
                            and c.get('ConsumerName') == cons_name))

            STREAM_CONSUMERS = [
                c for c in STREAM_CONSUMERS if not consumer_matches(c)
            ]
            return {}

        elif action == 'ListStreamConsumers':
            stream_arn = data.get('StreamARN', '').strip('" ')
            result = {
                'Consumers': [
                    c for c in STREAM_CONSUMERS
                    if c.get('StreamARN') == stream_arn
                ]
            }
            return encoded_response(result, encoding_type)

        elif action == 'DescribeStreamConsumer':
            consumer_arn = data.get('ConsumerARN', '').strip('" ')
            consumer_name = data.get('ConsumerName', '').strip('" ')
            stream_arn = data.get('StreamARN', '').strip('" ')

            consumer_to_locate = find_consumer(consumer_arn, consumer_name,
                                               stream_arn)
            if not consumer_to_locate:
                error_msg = 'Consumer %s not found.' % (consumer_arn or consumer_name)
                return simple_error_response(error_msg, 400,
                                             'ResourceNotFoundException',
                                             encoding_type)

            create_timestamp = consumer_to_locate.get('ConsumerCreationTimestamp')
            # use != (value comparison) rather than "is not" (identity) for strings
            time_formatted = (int(create_timestamp)
                              if encoding_type != APPLICATION_JSON else create_timestamp)

            result = {
                'ConsumerDescription': {
                    'ConsumerARN': consumer_to_locate.get('ConsumerARN'),
                    'ConsumerCreationTimestamp': time_formatted,
                    'ConsumerName': consumer_to_locate.get('ConsumerName'),
                    'ConsumerStatus': 'ACTIVE',
                    'StreamARN': data.get('StreamARN')
                }
            }
            return encoded_response(result, encoding_type)

        elif action == 'SubscribeToShard':
            result = subscribe_to_shard(data, headers)
            return result

        if random.random() < config.KINESIS_ERROR_PROBABILITY:
            if action in ['PutRecord', 'PutRecords']:
                return kinesis_error_response(data, action)

        return True
Example #22
 def test_now_utc(self):
     env = common.now_utc()
     test = datetime.now(pytz.UTC).timestamp()
     assert test == pytest.approx(env, 1)
Example #23
 def test_now_utc(self):
     env = common.now_utc()
     test = datetime.now(pytz.UTC).timestamp()
     self.assertAlmostEqual(env, test, delta=1)
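Examples #22 and #23 pin down the default contract of now_utc (the current UTC epoch time in seconds), and Examples #28 and #32 below exercise the millis=True variant. A minimal stand-in consistent with those tests, not necessarily LocalStack's actual implementation, could be:

from datetime import datetime, timezone

def now_utc(millis=False):
    # Current UTC epoch time: seconds by default, milliseconds if millis=True.
    seconds = datetime.now(timezone.utc).timestamp()
    return seconds * 1000 if millis else seconds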
Example #24
    def test_metric_filters(self, logs_client, logs_log_group, logs_log_stream,
                            cloudwatch_client):
        basic_filter_name = f"test-filter-basic-{short_uid()}"
        json_filter_name = f"test-filter-json-{short_uid()}"
        namespace_name = f"test-metric-namespace-{short_uid()}"
        basic_metric_name = f"test-basic-metric-{short_uid()}"
        json_metric_name = f"test-json-metric-{short_uid()}"
        basic_transforms = {
            "metricNamespace": namespace_name,
            "metricName": basic_metric_name,
            "metricValue": "1",
            "defaultValue": 0,
        }
        json_transforms = {
            "metricNamespace": namespace_name,
            "metricName": json_metric_name,
            "metricValue": "1",
            "defaultValue": 0,
        }
        logs_client.put_metric_filter(
            logGroupName=logs_log_group,
            filterName=basic_filter_name,
            filterPattern=" ",
            metricTransformations=[basic_transforms],
        )
        logs_client.put_metric_filter(
            logGroupName=logs_log_group,
            filterName=json_filter_name,
            filterPattern='{$.message = "test"}',
            metricTransformations=[json_transforms],
        )

        response = logs_client.describe_metric_filters(
            logGroupName=logs_log_group, filterNamePrefix="test-filter-")
        assert response["ResponseMetadata"]["HTTPStatusCode"] == 200
        filter_names = [
            _filter["filterName"] for _filter in response["metricFilters"]
        ]
        assert basic_filter_name in filter_names
        assert json_filter_name in filter_names

        # put log events and assert metrics being published
        events = [
            {
                "timestamp": now_utc(millis=True),
                "message": "log message 1"
            },
            {
                "timestamp": now_utc(millis=True),
                "message": "log message 2"
            },
        ]
        logs_client.put_log_events(logGroupName=logs_log_group,
                                   logStreamName=logs_log_stream,
                                   logEvents=events)

        # list metrics
        response = cloudwatch_client.list_metrics(Namespace=namespace_name)
        assert len(response["Metrics"]) == 2

        # delete filters
        logs_client.delete_metric_filter(logGroupName=logs_log_group,
                                         filterName=basic_filter_name)
        logs_client.delete_metric_filter(logGroupName=logs_log_group,
                                         filterName=json_filter_name)

        response = logs_client.describe_metric_filters(
            logGroupName=logs_log_group, filterNamePrefix="test-filter-")
        assert response["ResponseMetadata"]["HTTPStatusCode"] == 200
        filter_names = [
            _filter["filterName"] for _filter in response["metricFilters"]
        ]
        assert basic_filter_name not in filter_names
        assert json_filter_name not in filter_names
Example #25
    def test_put_subscription_filter_firehose(
        self,
        logs_client,
        logs_log_group,
        logs_log_stream,
        s3_bucket,
        s3_client,
        firehose_client,
        iam_client,
        iam_create_role_and_policy,
    ):
        try:
            firehose_name = f"test-firehose-{short_uid()}"
            s3_bucket_arn = f"arn:aws:s3:::{s3_bucket}"

            role = f"test-firehose-s3-role-{short_uid()}"
            policy_name = f"test-firehose-s3-role-policy-{short_uid()}"
            role_arn = iam_create_role_and_policy(
                RoleName=role,
                PolicyName=policy_name,
                RoleDefinition=s3_firehose_role,
                PolicyDefinition=s3_firehose_permission,
            )

            # TODO AWS has troubles creating the delivery stream the first time
            # policy is not accepted at first, so we try again
            def create_delivery_stream():
                firehose_client.create_delivery_stream(
                    DeliveryStreamName=firehose_name,
                    S3DestinationConfiguration={
                        "BucketARN": s3_bucket_arn,
                        "RoleARN": role_arn,
                        "BufferingHints": {
                            "SizeInMBs": 1,
                            "IntervalInSeconds": 60
                        },
                    },
                )

            retry(create_delivery_stream, retries=5, sleep=10.0)

            response = firehose_client.describe_delivery_stream(
                DeliveryStreamName=firehose_name)
            firehose_arn = response["DeliveryStreamDescription"][
                "DeliveryStreamARN"]

            role = f"test-firehose-role-{short_uid()}"
            policy_name = f"test-firehose-role-policy-{short_uid()}"
            role_arn_logs = iam_create_role_and_policy(
                RoleName=role,
                PolicyName=policy_name,
                RoleDefinition=logs_role,
                PolicyDefinition=firehose_permission,
            )

            def check_stream_active():
                state = firehose_client.describe_delivery_stream(
                    DeliveryStreamName=firehose_name
                )["DeliveryStreamDescription"]["DeliveryStreamStatus"]
                if state != "ACTIVE":
                    raise Exception(f"DeliveryStreamStatus is {state}")

            retry(check_stream_active, retries=60, sleep=30.0)

            logs_client.put_subscription_filter(
                logGroupName=logs_log_group,
                filterName="Destination",
                filterPattern="",
                destinationArn=firehose_arn,
                roleArn=role_arn_logs,
            )

            logs_client.put_log_events(
                logGroupName=logs_log_group,
                logStreamName=logs_log_stream,
                logEvents=[
                    {
                        "timestamp": now_utc(millis=True),
                        "message": "test"
                    },
                    {
                        "timestamp": now_utc(millis=True),
                        "message": "test 2"
                    },
                ],
            )

            def list_objects():
                response = s3_client.list_objects(Bucket=s3_bucket)
                assert len(response["Contents"]) >= 1

            retry(list_objects, retries=60, sleep=30.0)
            response = s3_client.list_objects(Bucket=s3_bucket)
            key = response["Contents"][-1]["Key"]
            response = s3_client.get_object(Bucket=s3_bucket, Key=key)
            content = gzip.decompress(response["Body"].read()).decode("utf-8")
            assert "DATA_MESSAGE" in content
            assert "test" in content
            assert "test 2" in content

        finally:
            # clean up
            firehose_client.delete_delivery_stream(
                DeliveryStreamName=firehose_name, AllowForceDelete=True)
Example #26
def shard_id(stream_arn, kinesis_shard_id):
    timestamp = str(now_utc())
    timestamp = '%s00000000' % timestamp[:-5]
    timestamp = '%s%s' % ('0' * (20 - len(timestamp)), timestamp)
    suffix = kinesis_shard_id.replace('shardId-', '')[:32]
    return 'shardId-%s-%s' % (timestamp, suffix)
Example #27
def publish_lambda_duration(time_before, kwargs):
    time_after = now_utc()
    publish_lambda_metric('Duration', time_after - time_before, kwargs)
Example #28
 def test_timstamp_millis(self):
     t1 = now_utc()
     t2 = now_utc(millis=True) / 1000
     self.assertAlmostEqual(t1, t2, delta=1)
Example #29
def shard_id(kinesis_shard_id: str) -> str:
    timestamp = str(int(now_utc()))
    timestamp = f"{timestamp[:-5]}00000000".rjust(20, "0")
    kinesis_shard_params = kinesis_shard_id.split("-")
    return f"{kinesis_shard_params[0]}-{timestamp}-{kinesis_shard_params[-1][:32]}"
Example #30
    def test_put_subscription_filter_lambda(
        self,
        lambda_client,
        logs_client,
        logs_log_group,
        logs_log_stream,
        create_lambda_function,
        sts_client,
    ):
        test_lambda_name = f"test-lambda-function-{short_uid()}"
        create_lambda_function(
            handler_file=TEST_LAMBDA_PYTHON3,
            libs=TEST_LAMBDA_LIBS,
            func_name=test_lambda_name,
            runtime=LAMBDA_RUNTIME_PYTHON36,
        )
        try:
            lambda_client.invoke(FunctionName=test_lambda_name, Payload=b"{}")
            # get account-id to set the correct policy
            account_id = sts_client.get_caller_identity()["Account"]
            lambda_client.add_permission(
                FunctionName=test_lambda_name,
                StatementId=test_lambda_name,
                Principal=f"logs.{config.DEFAULT_REGION}.amazonaws.com",
                Action="lambda:InvokeFunction",
                SourceArn=f"arn:aws:logs:{config.DEFAULT_REGION}:{account_id}:log-group:{logs_log_group}:*",
                SourceAccount=account_id,
            )
            logs_client.put_subscription_filter(
                logGroupName=logs_log_group,
                filterName="test",
                filterPattern="",
                destinationArn=aws_stack.lambda_function_arn(
                    test_lambda_name,
                    account_id=account_id,
                    region_name=config.DEFAULT_REGION),
            )

            logs_client.put_log_events(
                logGroupName=logs_log_group,
                logStreamName=logs_log_stream,
                logEvents=[
                    {
                        "timestamp": now_utc(millis=True),
                        "message": "test"
                    },
                    {
                        "timestamp": now_utc(millis=True),
                        "message": "test 2"
                    },
                ],
            )

            response = logs_client.describe_subscription_filters(
                logGroupName=logs_log_group)
            assert len(response["subscriptionFilters"]) == 1

            def check_invocation():
                events = testutil.get_lambda_log_events(
                    test_lambda_name,
                    log_group=logs_log_group,
                    logs_client=logs_client)
                assert len(events) == 2
                assert "test" in events
                assert "test 2" in events

            retry(check_invocation, retries=6, sleep=3.0)
        finally:
            # clean up lambda log group
            log_group_name = f"/aws/lambda/{test_lambda_name}"
            logs_client.delete_log_group(logGroupName=log_group_name)
Example #32
 def test_timstamp_millis(self):
     t1 = now_utc()
     t2 = now_utc(millis=True) / 1000  # convert millis to seconds before comparing
     self.assertLessEqual(t2 - t1, 1)