Example #1
def process_sqs_message(message_body, queue_name):
    # feed message into the first listening lambda
    try:
        queue_arn = aws_stack.sqs_queue_arn(queue_name)
        source = next(iter(get_event_sources(source_arn=queue_arn)), None)
        if source:
            arn = source['FunctionArn']
            event = {'Records': [{
                'body': message_body,
                'receiptHandle': 'MessageReceiptHandle',
                'md5OfBody': md5(message_body),
                'eventSourceARN': queue_arn,
                'eventSource': 'aws:sqs',
                'awsRegion': aws_stack.get_local_region(),
                'messageId': str(uuid.uuid4()),
                'attributes': {
                    'ApproximateFirstReceiveTimestamp': '{}000'.format(int(time.time())),
                    'SenderId': '123456789012',
                    'ApproximateReceiveCount': '1',
                    'SentTimestamp': '{}000'.format(int(time.time()))
                },
                'messageAttributes': {},
                'sqs': True,
            }]}
            run_lambda(event=event, context={}, func_arn=arn)
            return True
    except Exception as e:
        LOG.warning('Unable to run Lambda function on SQS messages: %s %s' % (e, traceback.format_exc()))
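A minimal usage sketch, assuming an SQS queue that already has a Lambda event source mapping registered (the queue name and message body below are illustrative, not from the source):

# Hypothetical call; queue name and payload are made up for the sketch.
was_forwarded = process_sqs_message(message_body='{"hello": "world"}', queue_name='my-queue')
if was_forwarded:
    print('message forwarded to the subscribed Lambda function')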
Example #2
    def test_stream_spec_and_region_replacement(self):
        aws_stack.create_dynamodb_table(TEST_DDB_TABLE_NAME_4,
                                        partition_key=PARTITION_KEY,
                                        stream_view_type='NEW_AND_OLD_IMAGES')

        table = self.dynamodb.Table(TEST_DDB_TABLE_NAME_4)

        # assert ARN formats
        expected_arn_prefix = 'arn:aws:dynamodb:' + aws_stack.get_local_region()
        self.assertTrue(table.table_arn.startswith(expected_arn_prefix))
        self.assertTrue(
            table.latest_stream_arn.startswith(expected_arn_prefix))

        # assert shard ID formats
        ddbstreams = aws_stack.connect_to_service('dynamodbstreams')
        result = ddbstreams.describe_stream(
            StreamArn=table.latest_stream_arn)['StreamDescription']
        self.assertIn('Shards', result)
        for shard in result['Shards']:
            self.assertRegex(shard['ShardId'],
                             r'^shardId\-[0-9]{20}\-[a-zA-Z0-9]{1,36}$')

        # clean up
        delete_table(TEST_DDB_TABLE_NAME_4)
Example #3
    def test_stream_spec_and_region_replacement(self):
        ddbstreams = aws_stack.connect_to_service('dynamodbstreams')
        kinesis = aws_stack.connect_to_service('kinesis')
        table_name = 'ddb-%s' % short_uid()
        aws_stack.create_dynamodb_table(
            table_name, partition_key=PARTITION_KEY, stream_view_type='NEW_AND_OLD_IMAGES')

        table = self.dynamodb.Table(table_name)

        # assert ARN formats
        expected_arn_prefix = 'arn:aws:dynamodb:' + aws_stack.get_local_region()
        self.assertTrue(table.table_arn.startswith(expected_arn_prefix))
        self.assertTrue(table.latest_stream_arn.startswith(expected_arn_prefix))

        # assert stream has been created
        stream_tables = [s['TableName'] for s in ddbstreams.list_streams()['Streams']]
        self.assertIn(table_name, stream_tables)
        stream_name = get_kinesis_stream_name(table_name)
        self.assertIn(stream_name, kinesis.list_streams()['StreamNames'])

        # assert shard ID formats
        result = ddbstreams.describe_stream(StreamArn=table.latest_stream_arn)['StreamDescription']
        self.assertIn('Shards', result)
        for shard in result['Shards']:
            self.assertRegex(shard['ShardId'], r'^shardId\-[0-9]{20}\-[a-zA-Z0-9]{1,36}$')

        # clean up
        delete_table(table_name)
        # assert stream has been deleted
        stream_tables = [s['TableName'] for s in ddbstreams.list_streams()['Streams']]
        self.assertNotIn(table_name, stream_tables)
        self.assertNotIn(stream_name, kinesis.list_streams()['StreamNames'])
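For reference, a shard ID that satisfies the pattern asserted above could look like the following (the value is made up for the sketch):

import re

# 20 digits after 'shardId-', followed by a short alphanumeric suffix.
assert re.match(r'^shardId-[0-9]{20}-[a-zA-Z0-9]{1,36}$',
                'shardId-00000001536019187677-404d21b2')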
Example #4
def extract_region_from_auth_header(headers):
    # TODO: use method from aws_stack directly (leaving import here for now, to avoid circular dependency)
    from localstack.utils.aws import aws_stack

    auth = headers.get("Authorization") or ""
    region = re.sub(r".*Credential=[^/]+/[^/]+/([^/]+)/.*", r"\1", auth)
    if region == auth:
        return None
    region = region or aws_stack.get_local_region()
    return region
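A minimal sketch of how this helper behaves, assuming a typical SigV4 Authorization header (all credential values below are made up):

sample_headers = {
    "Authorization": ("AWS4-HMAC-SHA256 Credential=AKIAEXAMPLE/20210101/eu-west-1/s3/aws4_request, "
                      "SignedHeaders=host;x-amz-date, Signature=abc123")
}
assert extract_region_from_auth_header(sample_headers) == "eu-west-1"

# Without a parsable credential scope, the helper returns None.
assert extract_region_from_auth_header({"Authorization": "Basic xyz"}) is None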
Example #5
    def test_region_replacement(self):
        dynamodb = aws_stack.connect_to_resource('dynamodb')
        aws_stack.create_dynamodb_table(TEST_DDB_TABLE_NAME_4,
                                        partition_key=PARTITION_KEY,
                                        stream_view_type='NEW_AND_OLD_IMAGES')

        table = dynamodb.Table(TEST_DDB_TABLE_NAME_4)

        expected_arn_prefix = 'arn:aws:dynamodb:' + aws_stack.get_local_region()

        assert table.table_arn.startswith(expected_arn_prefix)
        assert table.latest_stream_arn.startswith(expected_arn_prefix)
Example #6
def extract_region_from_headers(headers):
    region = headers.get(MARKER_APIGW_REQUEST_REGION)
    # Fix region lookup for certain requests, e.g., API gateway invocations
    #  that do not contain region details in the Authorization header.

    if region:
        return region

    region = extract_region_from_auth_header(headers)

    if not region:
        # fall back to local region
        region = aws_stack.get_local_region()

    return region
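A sketch of the lookup order, illustrative only: the API Gateway marker header wins, then the Authorization header, and finally the locally configured region.

# Hypothetical request carrying the marker header (key resolved via MARKER_APIGW_REQUEST_REGION).
assert extract_region_from_headers({MARKER_APIGW_REQUEST_REGION: "ap-southeast-2"}) == "ap-southeast-2"

# With no region hints at all, the helper falls back to the local default region.
assert extract_region_from_headers({}) == aws_stack.get_local_region()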
Example #7
    def test_region_replacement(self):
        aws_stack.create_dynamodb_table(
            TEST_DDB_TABLE_NAME_4,
            partition_key=PARTITION_KEY,
            stream_view_type='NEW_AND_OLD_IMAGES'
        )

        table = self.dynamodb.Table(TEST_DDB_TABLE_NAME_4)

        expected_arn_prefix = 'arn:aws:dynamodb:' + aws_stack.get_local_region()

        self.assertTrue(table.table_arn.startswith(expected_arn_prefix))
        self.assertTrue(table.latest_stream_arn.startswith(expected_arn_prefix))

        # clean up
        delete_table(TEST_DDB_TABLE_NAME_4)
Example #8
    def test_stream_spec_and_region_replacement(self):
        ddbstreams = aws_stack.connect_to_service("dynamodbstreams")
        kinesis = aws_stack.connect_to_service("kinesis")
        table_name = "ddb-%s" % short_uid()
        aws_stack.create_dynamodb_table(
            table_name,
            partition_key=PARTITION_KEY,
            stream_view_type="NEW_AND_OLD_IMAGES",
        )

        table = self.dynamodb.Table(table_name)

        # assert ARN formats
        expected_arn_prefix = "arn:aws:dynamodb:" + aws_stack.get_local_region(
        )
        self.assertTrue(table.table_arn.startswith(expected_arn_prefix))
        self.assertTrue(
            table.latest_stream_arn.startswith(expected_arn_prefix))

        # assert stream has been created
        stream_tables = [
            s["TableName"] for s in ddbstreams.list_streams()["Streams"]
        ]
        self.assertIn(table_name, stream_tables)
        stream_name = get_kinesis_stream_name(table_name)
        self.assertIn(stream_name, kinesis.list_streams()["StreamNames"])

        # assert shard ID formats
        result = ddbstreams.describe_stream(
            StreamArn=table.latest_stream_arn)["StreamDescription"]
        self.assertIn("Shards", result)
        for shard in result["Shards"]:
            self.assertRegex(shard["ShardId"],
                             r"^shardId\-[0-9]{20}\-[a-zA-Z0-9]{1,36}$")

        # clean up
        delete_table(table_name)
        # assert stream has been deleted
        stream_tables = [
            s["TableName"] for s in ddbstreams.list_streams()["Streams"]
        ]
        self.assertNotIn(table_name, stream_tables)
        self.assertNotIn(stream_name, kinesis.list_streams()["StreamNames"])
Example #9
    def test_stream_spec_and_region_replacement(self, dynamodb):
        ddbstreams = aws_stack.create_external_boto_client("dynamodbstreams")
        kinesis = aws_stack.create_external_boto_client("kinesis")
        table_name = "ddb-%s" % short_uid()
        aws_stack.create_dynamodb_table(
            table_name,
            partition_key=PARTITION_KEY,
            stream_view_type="NEW_AND_OLD_IMAGES",
        )

        table = dynamodb.Table(table_name)

        # assert ARN formats
        expected_arn_prefix = "arn:aws:dynamodb:" + aws_stack.get_local_region(
        )
        assert table.table_arn.startswith(expected_arn_prefix)
        assert table.latest_stream_arn.startswith(expected_arn_prefix)

        # assert stream has been created
        stream_tables = [
            s["TableName"] for s in ddbstreams.list_streams()["Streams"]
        ]
        assert table_name in stream_tables
        stream_name = get_kinesis_stream_name(table_name)
        assert stream_name in kinesis.list_streams()["StreamNames"]

        # assert shard ID formats
        result = ddbstreams.describe_stream(
            StreamArn=table.latest_stream_arn)["StreamDescription"]
        assert "Shards" in result
        for shard in result["Shards"]:
            assert re.match(r"^shardId-[0-9]{20}-[a-zA-Z0-9]{1,36}$",
                            shard["ShardId"])

        # clean up
        delete_table(table_name)
        # assert stream has been deleted
        stream_tables = [
            s["TableName"] for s in ddbstreams.list_streams()["Streams"]
        ]
        assert table_name not in stream_tables
        assert stream_name not in kinesis.list_streams()["StreamNames"]
Example #10
    def return_response(self, method, path, data, headers, response):
        data = json.loads(to_str(data))

        # update table definitions
        if data and 'TableName' in data and 'KeySchema' in data:
            TABLE_DEFINITIONS[data['TableName']] = data

        if response._content:
            # fix the table ARN (DynamoDBLocal hardcodes "ddblocal" as the region)
            content_replaced = re.sub(
                r'"TableArn"\s*:\s*"arn:aws:dynamodb:ddblocal:([^"]+)"',
                r'"TableArn": "arn:aws:dynamodb:%s:\1"' %
                aws_stack.get_local_region(), to_str(response._content))
            if content_replaced != response._content:
                response._content = content_replaced
                fix_headers_for_updated_response(response)

        action = headers.get('X-Amz-Target')
        if not action:
            return

        record = {
            'eventID': '1',
            'eventVersion': '1.0',
            'dynamodb': {
                'StreamViewType': 'NEW_AND_OLD_IMAGES',
                'SizeBytes': -1
            },
            'awsRegion': DEFAULT_REGION,
            'eventSource': 'aws:dynamodb'
        }
        records = [record]

        if action == '%s.UpdateItem' % ACTION_PREFIX:
            updated_item = find_existing_item(data)
            if not updated_item:
                return
            record['eventName'] = 'MODIFY'
            record['dynamodb']['Keys'] = data['Key']
            record['dynamodb']['OldImage'] = ProxyListenerDynamoDB.thread_local.existing_item
            record['dynamodb']['NewImage'] = updated_item
            record['dynamodb']['SizeBytes'] = len(json.dumps(updated_item))
        elif action == '%s.BatchWriteItem' % ACTION_PREFIX:
            records = []
            for table_name, requests in data['RequestItems'].items():
                for request in requests:
                    put_request = request.get('PutRequest')
                    if put_request:
                        keys = dynamodb_extract_keys(item=put_request['Item'],
                                                     table_name=table_name)
                        if isinstance(keys, Response):
                            return keys
                        new_record = clone(record)
                        new_record['eventName'] = 'INSERT'
                        new_record['dynamodb']['Keys'] = keys
                        new_record['dynamodb']['NewImage'] = put_request['Item']
                        new_record['eventSourceARN'] = aws_stack.dynamodb_table_arn(table_name)
                        records.append(new_record)
        elif action == '%s.PutItem' % ACTION_PREFIX:
            existing_item = ProxyListenerDynamoDB.thread_local.existing_item
            ProxyListenerDynamoDB.thread_local.existing_item = None
            record['eventName'] = 'INSERT' if not existing_item else 'MODIFY'
            keys = dynamodb_extract_keys(item=data['Item'],
                                         table_name=data['TableName'])
            if isinstance(keys, Response):
                return keys
            record['dynamodb']['Keys'] = keys
            record['dynamodb']['NewImage'] = data['Item']
            record['dynamodb']['SizeBytes'] = len(json.dumps(data['Item']))
        elif action == '%s.GetItem' % ACTION_PREFIX:
            if response.status_code == 200:
                content = json.loads(to_str(response.content))
                # make sure we append 'ConsumedCapacity', which is properly
                # returned by dynalite, but not by AWS's DynamoDBLocal
                if 'ConsumedCapacity' not in content and data.get(
                        'ReturnConsumedCapacity') in ('TOTAL', 'INDEXES'):
                    content['ConsumedCapacity'] = {
                        'CapacityUnits': 0.5,  # TODO hardcoded
                        'TableName': data['TableName']
                    }
                    response._content = json.dumps(content)
                    fix_headers_for_updated_response(response)
        elif action == '%s.DeleteItem' % ACTION_PREFIX:
            record['eventName'] = 'REMOVE'
            record['dynamodb']['Keys'] = data['Key']
        elif action == '%s.CreateTable' % ACTION_PREFIX:
            if 'StreamSpecification' in data:
                create_dynamodb_stream(data)
            event_publisher.fire_event(
                event_publisher.EVENT_DYNAMODB_CREATE_TABLE,
                payload={'n': event_publisher.get_hash(data['TableName'])})
            return
        elif action == '%s.DeleteTable' % ACTION_PREFIX:
            event_publisher.fire_event(
                event_publisher.EVENT_DYNAMODB_DELETE_TABLE,
                payload={'n': event_publisher.get_hash(data['TableName'])})
            return
        elif action == '%s.UpdateTable' % ACTION_PREFIX:
            if 'StreamSpecification' in data:
                create_dynamodb_stream(data)
            return
        else:
            # nothing to do
            return

        if len(records) > 0 and 'eventName' in records[0]:
            if 'TableName' in data:
                records[0]['eventSourceARN'] = aws_stack.dynamodb_table_arn(
                    data['TableName'])
            forward_to_lambda(records)
            forward_to_ddb_stream(records)
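The region fix-up above boils down to a regex rewrite of the ARN returned by DynamoDBLocal; a standalone sketch follows (the sample payload and the hardcoded region are illustrative only):

import re

sample = '{"TableDescription": {"TableArn": "arn:aws:dynamodb:ddblocal:000000000000:table/my-table"}}'
fixed = re.sub(
    r'"TableArn"\s*:\s*"arn:aws:dynamodb:ddblocal:([^"]+)"',
    r'"TableArn": "arn:aws:dynamodb:us-east-1:\1"',  # region hardcoded here for the sketch
    sample)
assert '"arn:aws:dynamodb:us-east-1:000000000000:table/my-table"' in fixed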
Example #11
    def return_response(self, method, path, data, headers, response):
        if path.startswith('/shell'):
            return
        data = json.loads(to_str(data))

        # update table definitions
        if data and 'TableName' in data and 'KeySchema' in data:
            TABLE_DEFINITIONS[data['TableName']] = data

        if response._content:
            # fix the table and latest stream ARNs (DynamoDBLocal hardcodes "ddblocal" as the region)
            content_replaced = re.sub(
                r'("TableArn"|"LatestStreamArn"|"StreamArn")\s*:\s*"arn:aws:dynamodb:'
                + 'ddblocal:([^"]+)"',
                r'\1: "arn:aws:dynamodb:%s:\2"' % aws_stack.get_local_region(),
                to_str(response._content))
            if content_replaced != response._content:
                response._content = content_replaced
                fix_headers_for_updated_response(response)

        action = headers.get('X-Amz-Target')
        if not action:
            return

        record = {
            'eventID': '1',
            'eventVersion': '1.0',
            'dynamodb': {
                'StreamViewType': 'NEW_AND_OLD_IMAGES',
                'SizeBytes': -1
            },
            'awsRegion': config.DEFAULT_REGION,
            'eventSource': 'aws:dynamodb'
        }
        records = [record]

        if action == '%s.UpdateItem' % ACTION_PREFIX:
            if response.status_code == 200:
                updated_item = find_existing_item(data)
                if not updated_item:
                    return
                record['eventName'] = 'MODIFY'
                record['dynamodb']['Keys'] = data['Key']
                record['dynamodb']['OldImage'] = self._thread_local('existing_item')
                record['dynamodb']['NewImage'] = updated_item
                record['dynamodb']['SizeBytes'] = len(json.dumps(updated_item))
        elif action == '%s.BatchWriteItem' % ACTION_PREFIX:
            records = self.prepare_batch_write_item_records(record, data)
        elif action == '%s.TransactWriteItems' % ACTION_PREFIX:
            records = self.prepare_transact_write_item_records(record, data)
        elif action == '%s.PutItem' % ACTION_PREFIX:
            if response.status_code == 200:
                existing_item = self._thread_local('existing_item')
                record['eventName'] = 'INSERT' if not existing_item else 'MODIFY'
                keys = dynamodb_extract_keys(item=data['Item'],
                                             table_name=data['TableName'])
                if isinstance(keys, Response):
                    return keys
                record['dynamodb']['Keys'] = keys
                record['dynamodb']['NewImage'] = data['Item']
                record['dynamodb']['SizeBytes'] = len(json.dumps(data['Item']))
                if existing_item:
                    record['dynamodb']['OldImage'] = existing_item
        elif action == '%s.GetItem' % ACTION_PREFIX:
            if response.status_code == 200:
                content = json.loads(to_str(response.content))
                # make sure we append 'ConsumedCapacity', which is properly
                # returned by dynalite, but not by AWS's DynamoDBLocal
                if 'ConsumedCapacity' not in content and data.get(
                        'ReturnConsumedCapacity') in ('TOTAL', 'INDEXES'):
                    content['ConsumedCapacity'] = {
                        'CapacityUnits': 0.5,  # TODO hardcoded
                        'TableName': data['TableName']
                    }
                    response._content = json.dumps(content)
                    fix_headers_for_updated_response(response)
        elif action == '%s.DeleteItem' % ACTION_PREFIX:
            if response.status_code == 200:
                old_item = self._thread_local('existing_item')
                record['eventName'] = 'REMOVE'
                record['dynamodb']['Keys'] = data['Key']
                record['dynamodb']['OldImage'] = old_item
        elif action == '%s.CreateTable' % ACTION_PREFIX:
            if 'StreamSpecification' in data:
                if response.status_code == 200:
                    content = json.loads(to_str(response._content))
                    create_dynamodb_stream(
                        data, content['TableDescription']['LatestStreamLabel'])
            event_publisher.fire_event(
                event_publisher.EVENT_DYNAMODB_CREATE_TABLE,
                payload={'n': event_publisher.get_hash(data['TableName'])})
            return
        elif action == '%s.DeleteTable' % ACTION_PREFIX:
            event_publisher.fire_event(
                event_publisher.EVENT_DYNAMODB_DELETE_TABLE,
                payload={'n': event_publisher.get_hash(data['TableName'])})
            return
        elif action == '%s.UpdateTable' % ACTION_PREFIX:
            if 'StreamSpecification' in data:
                if response.status_code == 200:
                    content = json.loads(to_str(response._content))
                    create_dynamodb_stream(
                        data, content['TableDescription']['LatestStreamLabel'])
            return
        else:
            # nothing to do
            return

        if len(records) > 0 and 'eventName' in records[0]:
            if 'TableName' in data:
                records[0]['eventSourceARN'] = aws_stack.dynamodb_table_arn(
                    data['TableName'])
            forward_to_lambda(records)
            forward_to_ddb_stream(records)
Example #12
    def return_response(self, method, path, data, headers, response):
        data = json.loads(to_str(data))

        # update table definitions
        if data and 'TableName' in data and 'KeySchema' in data:
            TABLE_DEFINITIONS[data['TableName']] = data

        if response._content:
            # fix the table ARN (DynamoDBLocal hardcodes "ddblocal" as the region)
            content_replaced = re.sub(r'"TableArn"\s*:\s*"arn:aws:dynamodb:ddblocal:([^"]+)"',
                r'"TableArn": "arn:aws:dynamodb:%s:\1"' % aws_stack.get_local_region(), to_str(response._content))
            if content_replaced != response._content:
                response._content = content_replaced
                fix_headers_for_updated_response(response)

        action = headers.get('X-Amz-Target')
        if not action:
            return

        record = {
            'eventID': '1',
            'eventVersion': '1.0',
            'dynamodb': {
                'StreamViewType': 'NEW_AND_OLD_IMAGES',
                'SizeBytes': -1
            },
            'awsRegion': DEFAULT_REGION,
            'eventSource': 'aws:dynamodb'
        }
        records = [record]

        if action == '%s.UpdateItem' % ACTION_PREFIX:
            updated_item = find_existing_item(data)
            if not updated_item:
                return
            record['eventName'] = 'MODIFY'
            record['dynamodb']['Keys'] = data['Key']
            record['dynamodb']['OldImage'] = ProxyListenerDynamoDB.thread_local.existing_item
            record['dynamodb']['NewImage'] = updated_item
            record['dynamodb']['SizeBytes'] = len(json.dumps(updated_item))
        elif action == '%s.BatchWriteItem' % ACTION_PREFIX:
            records = []
            for table_name, requests in data['RequestItems'].items():
                for request in requests:
                    put_request = request.get('PutRequest')
                    if put_request:
                        keys = dynamodb_extract_keys(item=put_request['Item'], table_name=table_name)
                        if isinstance(keys, Response):
                            return keys
                        new_record = clone(record)
                        new_record['eventName'] = 'INSERT'
                        new_record['dynamodb']['Keys'] = keys
                        new_record['dynamodb']['NewImage'] = put_request['Item']
                        new_record['eventSourceARN'] = aws_stack.dynamodb_table_arn(table_name)
                        records.append(new_record)
        elif action == '%s.PutItem' % ACTION_PREFIX:
            existing_item = ProxyListenerDynamoDB.thread_local.existing_item
            ProxyListenerDynamoDB.thread_local.existing_item = None
            record['eventName'] = 'INSERT' if not existing_item else 'MODIFY'
            keys = dynamodb_extract_keys(item=data['Item'], table_name=data['TableName'])
            if isinstance(keys, Response):
                return keys
            record['dynamodb']['Keys'] = keys
            record['dynamodb']['NewImage'] = data['Item']
            record['dynamodb']['SizeBytes'] = len(json.dumps(data['Item']))
        elif action == '%s.GetItem' % ACTION_PREFIX:
            if response.status_code == 200:
                content = json.loads(to_str(response.content))
                # make sure we append 'ConsumedCapacity', which is properly
                # returned by dynalite, but not by AWS's DynamoDBLocal
                if 'ConsumedCapacity' not in content and data.get('ReturnConsumedCapacity') in ('TOTAL', 'INDEXES'):
                    content['ConsumedCapacity'] = {
                        'CapacityUnits': 0.5,  # TODO hardcoded
                        'TableName': data['TableName']
                    }
                    response._content = json.dumps(content)
                    fix_headers_for_updated_response(response)
        elif action == '%s.DeleteItem' % ACTION_PREFIX:
            old_item = ProxyListenerDynamoDB.thread_local.existing_item
            record['eventName'] = 'REMOVE'
            record['dynamodb']['Keys'] = data['Key']
            record['dynamodb']['OldImage'] = old_item
        elif action == '%s.CreateTable' % ACTION_PREFIX:
            if 'StreamSpecification' in data:
                create_dynamodb_stream(data)
            event_publisher.fire_event(event_publisher.EVENT_DYNAMODB_CREATE_TABLE,
                payload={'n': event_publisher.get_hash(data['TableName'])})
            return
        elif action == '%s.DeleteTable' % ACTION_PREFIX:
            event_publisher.fire_event(event_publisher.EVENT_DYNAMODB_DELETE_TABLE,
                payload={'n': event_publisher.get_hash(data['TableName'])})
            return
        elif action == '%s.UpdateTable' % ACTION_PREFIX:
            if 'StreamSpecification' in data:
                create_dynamodb_stream(data)
            return
        else:
            # nothing to do
            return

        if len(records) > 0 and 'eventName' in records[0]:
            if 'TableName' in data:
                records[0]['eventSourceARN'] = aws_stack.dynamodb_table_arn(data['TableName'])
            forward_to_lambda(records)
            forward_to_ddb_stream(records)