Example #1
0
 def __init__(self, *arg):
     """Initialize with a fresh (empty) Response; positional args are accepted but ignored."""
     self.r = Response()
Example #2
0
    def return_response(self, method, path, data, headers, response,
                        request_handler):
        """Post-process ElasticMQ (SQS) responses before handing them to the client.

        Rewrites queue URLs/ARNs to the externally visible endpoint, adds
        missing queue attributes, and mocks the TagQueue/ListQueueTags
        actions that ElasticMQ does not implement.  Returns an int status
        code, a patched Response, the original response, or None to pass
        the backend response through unchanged.
        """
        if method == 'OPTIONS' and path == '/':
            # Allow CORS preflight requests to succeed.
            return 200

        # SQS queries arrive as POSTed form data; anything else passes through.
        if method != 'POST':
            return

        # Do not patch error responses; return them as-is.
        if response.status_code >= 400:
            return response

        region_name = extract_region_from_auth_header(headers)
        req_data = urlparse.parse_qs(to_str(data))
        action = req_data.get('Action', [None])[0]
        # Keep the original body so we can detect below whether we changed it.
        content_str = content_str_original = to_str(response.content)

        self._fire_event(req_data, response)

        # patch the response and add missing attributes
        if action == 'GetQueueAttributes':
            content_str = self._add_queue_attributes(path, req_data,
                                                     content_str, headers)

        # patch the response and return the correct endpoint URLs / ARNs
        if action in ('CreateQueue', 'GetQueueUrl', 'ListQueues',
                      'GetQueueAttributes'):
            if config.USE_SSL and '<QueueUrl>http://' in content_str:
                # return https://... if we're supposed to use SSL
                content_str = re.sub(r'<QueueUrl>\s*http://',
                                     r'<QueueUrl>https://', content_str)
            # expose external hostname:port
            external_port = SQS_PORT_EXTERNAL or get_external_port(
                headers, request_handler)
            content_str = re.sub(
                r'<QueueUrl>\s*([a-z]+)://[^<]*:([0-9]+)/([^<]*)\s*</QueueUrl>',
                r'<QueueUrl>\1://%s:%s/\3</QueueUrl>' %
                (HOSTNAME_EXTERNAL, external_port), content_str)
            # fix queue ARN
            content_str = re.sub(
                r'<([a-zA-Z0-9]+)>\s*arn:aws:sqs:elasticmq:([^<]+)</([a-zA-Z0-9]+)>',
                r'<\1>arn:aws:sqs:%s:\2</\3>' % (region_name), content_str)

        if content_str_original != content_str:
            # if changes have been made, return patched response
            new_response = Response()
            new_response.status_code = response.status_code
            new_response.headers = response.headers
            new_response._content = content_str
            # keep content-length consistent with the patched body
            new_response.headers['content-length'] = len(new_response._content)
            return new_response

        # Since the following 2 API calls are not implemented in ElasticMQ, we're mocking them
        # and letting them to return an empty response
        if action == 'TagQueue':
            new_response = Response()
            new_response.status_code = 200
            new_response._content = ("""
                <?xml version="1.0"?>
                <TagQueueResponse>
                    <ResponseMetadata>
                        <RequestId>{}</RequestId>
                    </ResponseMetadata>
                </TagQueueResponse>
            """).strip().format(uuid.uuid4())
            return new_response
        elif action == 'ListQueueTags':
            new_response = Response()
            new_response.status_code = 200
            new_response._content = ("""
                <?xml version="1.0"?>
                <ListQueueTagsResponse xmlns="{}">
                    <ListQueueTagsResult/>
                    <ResponseMetadata>
                        <RequestId>{}</RequestId>
                    </ResponseMetadata>
                </ListQueueTagsResponse>
            """).strip().format(XMLNS_SQS, uuid.uuid4())
            return new_response
Example #3
0
    def forward_request(self, method, path, data, headers):
        """Intercept DynamoDB API calls before they reach DynamoDBLocal.

        Serves mocked actions directly (TTL, tagging, Kinesis streaming
        destinations, global tables), validates others, and records item
        pre-images in thread-local storage so return_response can later
        decide between INSERT and MODIFY stream events.  Returns a
        ready-made Response/error, or a Request to forward to the backend.
        """
        result = handle_special_request(method, path, data, headers)
        if result is not None:
            return result

        # prepare request headers
        self.prepare_request_headers(headers)

        data_orig = data
        data = data or "{}"
        data = json.loads(to_str(data))
        ddb_client = aws_stack.connect_to_service("dynamodb")
        action = headers.get("X-Amz-Target", "")
        action = action.replace(ACTION_PREFIX, "")

        if self.should_throttle(action):
            return error_response_throughput()

        # reset per-request thread-local state
        ProxyListenerDynamoDB.thread_local.existing_item = None
        # NOTE(review): table_def is only bound when "TableName" is present;
        # the Kinesis streaming branches below read it -- presumably those
        # actions always carry a TableName (confirm against callers).
        if "TableName" in data:
            table_def = DynamoDBRegion.get().table_definitions.get(
                data["TableName"]) or {}

        if action == "CreateTable":
            # Check if table exists, to avoid error log output from DynamoDBLocal
            if self.table_exists(ddb_client, data["TableName"]):
                return error_response(
                    message="Table already created",
                    error_type="ResourceInUseException",
                    code=400,
                )

        elif action == "CreateGlobalTable":
            return create_global_table(data)

        elif action == "DescribeGlobalTable":
            return describe_global_table(data)

        elif action == "ListGlobalTables":
            return list_global_tables(data)

        elif action == "UpdateGlobalTable":
            return update_global_table(data)

        elif action in ("PutItem", "UpdateItem", "DeleteItem"):
            # find an existing item and store it in a thread-local, so we can access it in return_response,
            # in order to determine whether an item already existed (MODIFY) or not (INSERT)
            try:
                if has_event_sources_or_streams_enabled(data["TableName"]):
                    ProxyListenerDynamoDB.thread_local.existing_item = find_existing_item(
                        data)
            except Exception as e:
                if "ResourceNotFoundException" in str(e):
                    return get_table_not_found_error()
                raise

            # Fix incorrect values if ReturnValues==ALL_OLD and ReturnConsumedCapacity is
            # empty, see https://github.com/localstack/localstack/issues/2049
            if ((data.get("ReturnValues") == "ALL_OLD") or
                (not data.get("ReturnValues"))
                ) and not data.get("ReturnConsumedCapacity"):
                data["ReturnConsumedCapacity"] = "TOTAL"
                return Request(data=json.dumps(data),
                               method=method,
                               headers=headers)

        elif action == "DescribeTable":
            # Check if table exists, to avoid error log output from DynamoDBLocal
            if not self.table_exists(ddb_client, data["TableName"]):
                return get_table_not_found_error()

        elif action == "DeleteTable":
            # Check if table exists, to avoid error log output from DynamoDBLocal
            if not self.table_exists(ddb_client, data["TableName"]):
                return get_table_not_found_error()

        elif action == "BatchWriteItem":
            existing_items = []
            unprocessed_put_items = []
            unprocessed_delete_items = []
            for table_name in sorted(data["RequestItems"].keys()):
                for request in data["RequestItems"][table_name]:
                    for key in ["PutRequest", "DeleteRequest"]:
                        inner_request = request.get(key)
                        if inner_request:
                            # throttled sub-requests are collected so they can
                            # be reported back as unprocessed items
                            if self.should_throttle(action):
                                if key == "PutRequest":
                                    unprocessed_put_items.append(inner_request)
                                elif key == "DeleteRequest":
                                    unprocessed_delete_items.append(
                                        inner_request)
                            else:
                                item = find_existing_item(
                                    inner_request, table_name)
                                existing_items.append(item)
            ProxyListenerDynamoDB.thread_local.existing_items = existing_items
            ProxyListenerDynamoDB.thread_local.unprocessed_put_items = unprocessed_put_items
            ProxyListenerDynamoDB.thread_local.unprocessed_delete_items = unprocessed_delete_items

        elif action == "Query":
            if data.get("IndexName"):
                if not is_index_query_valid(to_str(data["TableName"]),
                                            data.get("Select")):
                    return error_response(
                        message=
                        "One or more parameter values were invalid: Select type ALL_ATTRIBUTES "
                        "is not supported for global secondary index id-index because its projection "
                        "type is not ALL",
                        error_type="ValidationException",
                        code=400,
                    )

        elif action == "TransactWriteItems":
            # capture pre-images for every item touched by the transaction
            existing_items = []
            for item in data["TransactItems"]:
                for key in ["Put", "Update", "Delete"]:
                    inner_item = item.get(key)
                    if inner_item:
                        existing_items.append(find_existing_item(inner_item))
            ProxyListenerDynamoDB.thread_local.existing_items = existing_items

        elif action == "UpdateTimeToLive":
            # TODO: TTL status is maintained/mocked but no real expiry is happening for items
            response = Response()
            response.status_code = 200
            self._table_ttl_map[data["TableName"]] = {
                "AttributeName":
                data["TimeToLiveSpecification"]["AttributeName"],
                "Status": data["TimeToLiveSpecification"]["Enabled"],
            }
            response._content = json.dumps(
                {"TimeToLiveSpecification": data["TimeToLiveSpecification"]})
            fix_headers_for_updated_response(response)
            return response

        elif action == "DescribeTimeToLive":
            response = Response()
            response.status_code = 200
            if data["TableName"] in self._table_ttl_map:
                if self._table_ttl_map[data["TableName"]]["Status"]:
                    ttl_status = "ENABLED"
                else:
                    ttl_status = "DISABLED"
                response._content = json.dumps({
                    "TimeToLiveDescription": {
                        "AttributeName":
                        self._table_ttl_map[data["TableName"]]
                        ["AttributeName"],
                        "TimeToLiveStatus":
                        ttl_status,
                    }
                })
            else:  # TTL for dynamodb table not set
                response._content = json.dumps({
                    "TimeToLiveDescription": {
                        "TimeToLiveStatus": "DISABLED"
                    }
                })

            fix_headers_for_updated_response(response)
            return response

        elif action in ("TagResource", "UntagResource"):
            response = Response()
            response.status_code = 200
            response._content = ""  # returns an empty body on success.
            fix_headers_for_updated_response(response)
            return response

        elif action == "ListTagsOfResource":
            response = Response()
            response.status_code = 200
            response._content = json.dumps({
                "Tags": [{
                    "Key": k,
                    "Value": v
                } for k, v in DynamoDBRegion.TABLE_TAGS.get(
                    data["ResourceArn"], {}).items()]
            })
            fix_headers_for_updated_response(response)
            return response

        elif action == "EnableKinesisStreamingDestination":
            # Check if table exists, to avoid error log output from DynamoDBLocal
            if not self.table_exists(ddb_client, data["TableName"]):
                return get_table_not_found_error()
            stream = is_kinesis_stream_exists(stream_arn=data["StreamArn"])
            if not stream:
                return error_response(
                    error_type="ValidationException",
                    message=
                    "User does not have a permission to use kinesis stream",
                )

            return dynamodb_enable_kinesis_streaming_destination(
                data, table_def)

        elif action == "DisableKinesisStreamingDestination":
            # Check if table exists, to avoid error log output from DynamoDBLocal
            if not self.table_exists(ddb_client, data["TableName"]):
                return get_table_not_found_error()
            stream = is_kinesis_stream_exists(stream_arn=data["StreamArn"])
            if not stream:
                return error_response(
                    error_type="ValidationException",
                    message=
                    "User does not have a permission to use kinesis stream",
                )

            return dynamodb_disable_kinesis_streaming_destination(
                data, table_def)

        elif action == "DescribeKinesisStreamingDestination":
            # Check if table exists, to avoid error log output from DynamoDBLocal
            if not self.table_exists(ddb_client, data["TableName"]):
                return get_table_not_found_error()
            response = aws_responses.requests_response({
                "KinesisDataStreamDestinations":
                table_def.get("KinesisDataStreamDestinations") or [],
                "TableName":
                data["TableName"],
            })
            return response

        # default: forward the (possibly unmodified) request to the backend
        return Request(data=data_orig, method=method, headers=headers)
Example #4
0
def delete_cors(bucket_name):
    """Drop any stored CORS configuration for *bucket_name*.

    Always answers with an empty HTTP 200, whether or not a configuration
    was present.
    """
    # TODO: check if bucket exists, otherwise return 404-like error
    BUCKET_CORS.pop(bucket_name, {})
    result = Response()
    result.status_code = 200
    return result
Example #5
0
    def forward_request(self, method, path, data, headers):
        """Intercept DynamoDB API calls before forwarding to DynamoDBLocal.

        Mocks TTL/tagging actions, validates table existence for several
        actions, and stores item pre-images in thread-local state for use
        by return_response.  Returns True to forward the request unchanged,
        an int status code, or a ready-made Response.
        """
        if path.startswith('/shell'):
            # shell UI requests are forwarded untouched
            return True
        if method == 'OPTIONS':
            return 200

        data = json.loads(to_str(data))
        ddb_client = aws_stack.connect_to_service('dynamodb')

        # optionally inject artificial throughput errors for testing
        if random.random() < config.DYNAMODB_ERROR_PROBABILITY:
            return error_response_throughput()

        action = headers.get('X-Amz-Target')
        if action == '%s.CreateTable' % ACTION_PREFIX:
            # Check if table exists, to avoid error log output from DynamoDBLocal
            table_names = ddb_client.list_tables()['TableNames']
            if to_str(data['TableName']) in table_names:
                return 200
        elif action in ('%s.PutItem' % ACTION_PREFIX,
                        '%s.UpdateItem' % ACTION_PREFIX,
                        '%s.DeleteItem' % ACTION_PREFIX):
            # find an existing item and store it in a thread-local, so we can access it in return_response,
            # in order to determine whether an item already existed (MODIFY) or not (INSERT)
            ProxyListenerDynamoDB.thread_local.existing_item = find_existing_item(
                data)
        elif action == '%s.DescribeTable' % ACTION_PREFIX:
            # Check if table exists, to avoid error log output from DynamoDBLocal
            table_names = ddb_client.list_tables()['TableNames']
            if to_str(data['TableName']) not in table_names:
                response = error_response(
                    message='Cannot do operations on a non-existent table',
                    error_type='ResourceNotFoundException')
                fix_headers_for_updated_response(response)
                return response
        elif action == '%s.DeleteTable' % ACTION_PREFIX:
            # Check if table exists, to avoid error log output from DynamoDBLocal
            table_names = ddb_client.list_tables()['TableNames']
            if to_str(data['TableName']) not in table_names:
                response = error_response(
                    message='Cannot do operations on a non-existent table',
                    error_type='ResourceNotFoundException')
                fix_headers_for_updated_response(response)
                return response
        elif action == '%s.BatchWriteItem' % ACTION_PREFIX:
            # capture pre-images of all items in the batch
            existing_items = []
            for table_name in sorted(data['RequestItems'].keys()):
                for request in data['RequestItems'][table_name]:
                    for key in ['PutRequest', 'DeleteRequest']:
                        inner_request = request.get(key)
                        if inner_request:
                            existing_items.append(
                                find_existing_item(inner_request, table_name))
            ProxyListenerDynamoDB.thread_local.existing_items = existing_items
        elif action == '%s.TransactWriteItems' % ACTION_PREFIX:
            # capture pre-images of all items in the transaction
            existing_items = []
            for item in data['TransactItems']:
                for key in ['Put', 'Update', 'Delete']:
                    inner_item = item.get(key)
                    if inner_item:
                        existing_items.append(find_existing_item(inner_item))
            ProxyListenerDynamoDB.thread_local.existing_items = existing_items
        elif action == '%s.UpdateTimeToLive' % ACTION_PREFIX:
            # TODO: TTL status is maintained/mocked but no real expiry is happening for items
            response = Response()
            response.status_code = 200
            self._table_ttl_map[data['TableName']] = {
                'AttributeName':
                data['TimeToLiveSpecification']['AttributeName'],
                'Status': data['TimeToLiveSpecification']['Enabled']
            }
            response._content = json.dumps(
                {'TimeToLiveSpecification': data['TimeToLiveSpecification']})
            fix_headers_for_updated_response(response)
            return response
        elif action == '%s.DescribeTimeToLive' % ACTION_PREFIX:
            response = Response()
            response.status_code = 200
            if data['TableName'] in self._table_ttl_map:
                if self._table_ttl_map[data['TableName']]['Status']:
                    ttl_status = 'ENABLED'
                else:
                    ttl_status = 'DISABLED'
                response._content = json.dumps({
                    'TimeToLiveDescription': {
                        'AttributeName':
                        self._table_ttl_map[data['TableName']]
                        ['AttributeName'],
                        'TimeToLiveStatus':
                        ttl_status
                    }
                })
            else:  # TTL for dynamodb table not set
                response._content = json.dumps({
                    'TimeToLiveDescription': {
                        'TimeToLiveStatus': 'DISABLED'
                    }
                })
            fix_headers_for_updated_response(response)
            return response
        elif action == '%s.TagResource' % ACTION_PREFIX or action == '%s.UntagResource' % ACTION_PREFIX:
            response = Response()
            response.status_code = 200
            response._content = ''  # returns an empty body on success.
            fix_headers_for_updated_response(response)
            return response
        elif action == '%s.ListTagsOfResource' % ACTION_PREFIX:
            response = Response()
            response.status_code = 200
            response._content = json.dumps({
                'Tags': []
            })  # TODO: mocked and returns an empty list of tags for now.
            fix_headers_for_updated_response(response)
            return response

        # default: forward the request to the backend unchanged
        return True
Example #6
0
 def get_object(self, id, version=None):
     """Mock lookup that always fails: builds a 404 response and raises.

     ``raise_for_status`` on a 404 raises an HTTPError, so this method
     never returns normally.
     """
     resp = Response()
     resp.status_code = 404
     resp.raise_for_status()
Example #7
0
def readfromDB():
    """Generic read endpoint: dispatch on the JSON body's table/caller fields.

    Expects a JSON payload with "table" ("User" or "Ride"), "caller" (the
    operation being checked), and operation-specific keys.  Returns a
    (body, status_code, headers) triple; unknown table/caller combinations
    fall through and return None.
    """
    payload = request.get_json()

    def plain_status(code):
        # Flatten an empty requests Response into the (body, status,
        # headers) triple that the framework accepts.
        reply = Response()
        reply.status_code = code
        return (reply.text, reply.status_code, reply.headers.items())

    table = payload["table"]

    if table == "User":
        if payload["caller"] in ("removeUser", "createRide"):
            # these callers require the user to already exist
            found = User.query.filter_by(username=payload["username"]).all()
            return plain_status(200 if found else 400)
        elif payload["caller"] == "addUser":
            # adding requires the user NOT to exist yet
            found = User.query.filter_by(username=payload["username"]).all()
            return plain_status(400 if found else 200)

    elif table == "Ride":
        if payload["caller"] == "listUpcomingRides":
            matches = []
            for ride in Ride.query.all():
                same_route = (ride.source == payload["source"]
                              and ride.destination == payload["destination"])
                if same_route and timeAhead(ride.timestamp):
                    matches.append({"rideId": ride.ride_id,
                                    "username": ride.created_by,
                                    "timestamp": ride.timestamp})
            status = Response()
            status.status_code = 200 if matches else 204
            return (jsonify(matches), status.status_code,
                    status.headers.items())

        elif payload["caller"] == "listRideDetails":
            status = Response()
            details = dict()
            wanted = None
            for ride in Ride.query.all():
                if ride.ride_id == int(payload["rideId"]):
                    wanted = ride
                    break
            if wanted is None:
                status.status_code = 204
            else:
                members = wanted.username.split(", ")
                if members[0] == "":
                    members.clear()
                status.status_code = 200
                details = collections.OrderedDict([
                    ("rideId", wanted.ride_id),
                    ("created_by", wanted.created_by),
                    ("users", members),
                    ("timestamp", wanted.timestamp),
                    ("source", wanted.source),
                    ("destination", wanted.destination),
                ])
            return (jsonify(details), status.status_code,
                    status.headers.items())

        elif payload["caller"] == "deleteRide":
            found = Ride.query.filter_by(ride_id=payload["rideId"]).all()
            return plain_status(200 if found else 400)

        elif payload["caller"] == "joinRide":
            found = Ride.query.filter_by(ride_id=payload["rideId"]).all()
            return plain_status(200 if found else 400)
 def get_invalid_authentication_response():
     """Simulate an auth failure: HTTP 200 carrying an HTML gateway-timeout page."""
     response = Response()
     response._content = "<h1>Gateway Timeout</h1>".encode("utf-8")
     response.status_code = 200
     return response
 def get_invalid_authentication_response():
     """Simulate an auth failure: HTTP 200 whose body is not valid JSON."""
     response = Response()
     response._content = "My invalid JSON string".encode("utf-8")
     response.status_code = 200
     return response
 def get_invalid_authentication_response():
     """Simulate an auth failure: HTTP 200 carrying an HTML 'Developer Inactive' page."""
     response = Response()
     response._content = "<h1>Developer Inactive</h1>".encode("utf-8")
     response.status_code = 200
     return response
 def get_invalid_authentication_response():
     """Simulate an auth failure: HTTP 500 with an empty body."""
     response = Response()
     response._content = "".encode("utf-8")
     response.status_code = 500
     return response
 def get_invalid_authentication_response():
     """Simulate an auth failure: HTTP 200 carrying an HTML 'Not Authorized' page."""
     response = Response()
     response._content = "<h1>Not Authorized</h1>".encode("utf-8")
     response.status_code = 200
     return response
 def get_invalid_authentication_response():
     """Simulate an auth failure: an empty response with HTTP status 401."""
     response = Response()
     response.status_code = 401
     return response
Example #14
0
    def forward_request(self, method, path, data, headers):
        """Route an incoming edge request to the backend API it targets.

        Serves health/graph endpoints directly, resolves the target API
        and port from headers and custom rules, fills in missing auth
        headers, and forwards the request.  Returns an int status code,
        a Response, or the result of do_forward_request.
        """

        if path.split('?')[0] == '/health':
            return serve_health_endpoint(method, path, data)
        if method == 'POST' and path == '/graph':
            return serve_resource_graph(data)

        # kill the process if we receive this header
        headers.get(HEADER_KILL_SIGNAL) and os._exit(0)

        target = headers.get('x-amz-target', '')
        auth_header = headers.get('authorization', '')
        host = headers.get('host', '')
        headers[HEADER_LOCALSTACK_EDGE_URL] = 'https://%s' % host

        # extract API details
        api, port, path, host = get_api_from_headers(headers, method=method, path=path, data=data)

        set_default_region_in_headers(headers)

        # a negative port signals "do not forward" -> 404
        if port and int(port) < 0:
            return 404

        if not port:
            api, port = get_api_from_custom_rules(method, path, data, headers) or (api, port)

        if not port:
            # no forwarding rule matched: log a hint and answer 404
            if method == 'OPTIONS':
                return 200

            if api in ['', None, '_unknown_']:
                truncated = truncate(data)
                if auth_header or target or data or path not in ['/', '/favicon.ico']:
                    LOG.info(('Unable to find forwarding rule for host "%s", path "%s %s", '
                        'target header "%s", auth header "%s", data "%s"') % (
                            host, method, path, target, auth_header, truncated))
            else:
                LOG.info(('Unable to determine forwarding port for API "%s" - please '
                    'make sure this API is enabled via the SERVICES configuration') % api)
            response = Response()
            response.status_code = 404
            response._content = '{"status": "running"}'
            return response

        # supply a mock Authorization header if the client sent none
        if api and not headers.get('Authorization'):
            headers['Authorization'] = aws_stack.mock_aws_request_headers(api)['Authorization']

        headers['Host'] = host
        if isinstance(data, dict):
            data = json.dumps(data)

        # transparently decompress gzip payloads (except for S3)
        encoding_type = headers.get('content-encoding') or ''
        if encoding_type.upper() == GZIP_ENCODING and api is not S3:
            headers.set('content-encoding', IDENTITY_ENCODING)
            data = gzip.decompress(data)

        # skip the bootstrap lock for internal calls or once restore is done
        lock_ctx = BOOTSTRAP_LOCK
        if persistence.API_CALLS_RESTORED or is_internal_call_context(headers):
            lock_ctx = empty_context_manager()

        with lock_ctx:
            return do_forward_request(api, method, path, data, headers, port=port)
Example #15
0
    def forward_request(self, method, path, data, headers):
        """Handle API Gateway user requests by invoking the backing integration.

        Matches invocation paths of the form
        /restapis/<api-id>/<stage>/<PATH_USER_REQUEST>/<path>, looks up the
        method's integration, and dispatches to Kinesis, SQS, Lambda
        (AWS_PROXY), or a plain HTTP backend.  Returns a Response, an error,
        or True to forward the request unchanged.
        """
        data = data and json.loads(to_str(data))

        # Paths to match
        regex2 = r'^/restapis/([A-Za-z0-9_\-]+)/([A-Za-z0-9_\-]+)/%s/(.*)$' % PATH_USER_REQUEST

        if re.match(regex2, path):
            search_match = re.search(regex2, path)
            api_id = search_match.group(1)
            stage = search_match.group(2)
            relative_path_w_query_params = '/%s' % search_match.group(3)

            relative_path, query_string_params = extract_query_string_params(
                path=relative_path_w_query_params)

            # resolve the API resource that matches the request path
            path_map = helpers.get_rest_api_paths(rest_api_id=api_id)
            try:
                extracted_path, resource = get_resource_for_path(
                    path=relative_path, path_map=path_map)
            except Exception:
                return make_error('Unable to find path %s' % path, 404)

            # find the integration for this HTTP method (fall back to ANY)
            integrations = resource.get('resourceMethods', {})
            integration = integrations.get(method, {})
            if not integration:
                integration = integrations.get('ANY', {})
            integration = integration.get('methodIntegration')
            if not integration:
                if method == 'OPTIONS' and 'Origin' in headers:
                    # default to returning CORS headers if this is an OPTIONS request
                    return get_cors_response(headers)
                return make_error(
                    'Unable to find integration for path %s' % path, 404)

            uri = integration.get('uri')
            if method == 'POST' and integration['type'] == 'AWS':
                if uri.endswith('kinesis:action/PutRecords'):
                    template = integration['requestTemplates'][
                        APPLICATION_JSON]
                    new_request = aws_stack.render_velocity_template(
                        template, data)

                    # forward records to target kinesis stream
                    headers = aws_stack.mock_aws_request_headers(
                        service='kinesis')
                    headers[
                        'X-Amz-Target'] = kinesis_listener.ACTION_PUT_RECORDS
                    result = common.make_http_request(url=TEST_KINESIS_URL,
                                                      method='POST',
                                                      data=new_request,
                                                      headers=headers)
                    return result

                elif uri.startswith(
                        'arn:aws:apigateway:') and ':sqs:path' in uri:
                    # forward the rendered request to the target SQS queue
                    template = integration['requestTemplates'][
                        APPLICATION_JSON]
                    account_id, queue = uri.split('/')[-2:]
                    region_name = uri.split(':')[3]

                    new_request = aws_stack.render_velocity_template(
                        template, data) + '&QueueName=%s' % queue
                    headers = aws_stack.mock_aws_request_headers(
                        service='sqs', region_name=region_name)

                    url = urljoin(
                        TEST_SQS_URL,
                        '%s/%s?%s' % (account_id, queue, new_request))
                    result = common.make_http_request(url,
                                                      method='GET',
                                                      headers=headers)
                    return result

                else:
                    msg = 'API Gateway action uri "%s" not yet implemented' % uri
                    LOGGER.warning(msg)
                    return make_error(msg, 404)

            elif integration['type'] == 'AWS_PROXY':
                if uri.startswith(
                        'arn:aws:apigateway:') and ':lambda:path' in uri:
                    # parse function ARN and account id out of the integration URI
                    func_arn = uri.split(':lambda:path')[1].split(
                        'functions/')[1].split('/invocations')[0]
                    data_str = json.dumps(data) if isinstance(
                        data, (dict, list)) else data
                    account_id = uri.split(':lambda:path')[1].split(
                        ':function:')[0].split(':')[-1]

                    source_ip = headers['X-Forwarded-For'].split(',')[-2]

                    # Sample request context:
                    # https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-create-api-as-simple-proxy-for-lambda.html#api-gateway-create-api-as-simple-proxy-for-lambda-test
                    request_context = {
                        'path': relative_path,
                        'accountId': account_id,
                        'resourceId': resource.get('id'),
                        'stage': stage,
                        'identity': {
                            'accountId': account_id,
                            'sourceIp': source_ip,
                            'userAgent': headers['User-Agent'],
                        }
                    }

                    try:
                        path_params = extract_path_params(
                            path=relative_path, extracted_path=extracted_path)
                    except Exception:
                        path_params = {}

                    result = lambda_api.process_apigateway_invocation(
                        func_arn,
                        relative_path,
                        data_str,
                        headers,
                        path_params=path_params,
                        query_string_params=query_string_params,
                        method=method,
                        resource_path=path,
                        request_context=request_context)

                    if isinstance(result, FlaskResponse):
                        return flask_to_requests_response(result)

                    # translate the Lambda proxy result into an HTTP response
                    response = Response()
                    parsed_result = result if isinstance(
                        result, dict) else json.loads(result)
                    parsed_result = common.json_safe(parsed_result)
                    response.status_code = int(
                        parsed_result.get('statusCode', 200))
                    response.headers.update(parsed_result.get('headers', {}))
                    try:
                        if isinstance(parsed_result['body'], dict):
                            response._content = json.dumps(
                                parsed_result['body'])
                        else:
                            response._content = parsed_result['body']
                    except Exception:
                        response._content = '{}'
                    return response
                else:
                    msg = 'API Gateway action uri "%s" not yet implemented' % uri
                    LOGGER.warning(msg)
                    return make_error(msg, 404)

            elif integration['type'] == 'HTTP':
                # plain HTTP integration: relay the request via `requests`
                function = getattr(requests, method.lower())
                if isinstance(data, dict):
                    data = json.dumps(data)
                result = function(integration['uri'],
                                  data=data,
                                  headers=headers)
                return result

            else:
                msg = (
                    'API Gateway integration type "%s" for method "%s" not yet implemented'
                    % (integration['type'], method))
                LOGGER.warning(msg)
                return make_error(msg, 404)

            # NOTE(review): unreachable -- every branch above returns
            return 200

        if re.match(PATH_REGEX_AUTHORIZERS, path):
            return handle_authorizers(method, path, data, headers)

        # not an invocation path: forward unchanged
        return True
 def get(self, *args, **kwargs):
     """Stubbed GET: always answers HTTP 200 with a body that is not valid JSON."""
     fake = Response()
     fake._content = "My invalid JSON string".encode("utf-8")
     fake.status_code = 200
     return fake
Example #17
0
 def get_response(self, content, code=200):
     """Wrap *content* in a Response carrying status *code* (default 200)."""
     result = Response()
     result.status_code = code
     result._content = content
     return result
Example #18
0
def modify_and_forward(method=None,
                       path=None,
                       data_bytes=None,
                       headers=None,
                       forward_base_url=None,
                       listeners=None,
                       request_handler=None,
                       client_address=None,
                       server_address=None):
    """ This is the central function that coordinates the incoming/outgoing messages
        with the proxy listeners (message interceptors).

        :param method: HTTP verb of the incoming request
        :param path: request path; may be a full URL, which is reduced to its path
        :param data_bytes: raw request body forwarded to the backend
        :param headers: request headers (mutated in place: Host, X-Forwarded-For, ...)
        :param forward_base_url: base URL of the backend service
        :param listeners: optional extra ProxyListener instances (prepended with defaults)
        :param request_handler: handler reference, passed to listeners that accept it
        :param client_address: client address used to build X-Forwarded-For
        :param server_address: server address used to build X-Forwarded-For
        :return: the final Response (possibly patched by a listener), an early
            listener-generated Response, or an async generator result
    """

    listeners = ProxyListener.DEFAULT_LISTENERS + (listeners or [])
    listeners = [lis for lis in listeners if lis]
    data = data_bytes

    def is_full_url(url):
        # True when the string carries a scheme, e.g. "http://host/..."
        return re.match(r'[a-zA-Z]+://.+', url)

    # reduce a full URL down to its path component
    if is_full_url(path):
        path = path.split('://', 1)[1]
        path = '/%s' % (path.split('/', 1)[1] if '/' in path else '')
    proxy_url = '%s%s' % (forward_base_url, path)

    # allow any listener to override the URL we forward to
    for listener in listeners:
        proxy_url = listener.get_forward_url(method, path, data,
                                             headers) or proxy_url

    target_url = path
    if not is_full_url(target_url):
        target_url = '%s%s' % (forward_base_url, target_url)

    # update original "Host" header (moto s3 relies on this behavior)
    if not headers.get('Host'):
        headers['host'] = urlparse(target_url).netloc
    headers['X-Forwarded-For'] = build_x_forwarded_for(headers, client_address,
                                                       server_address)

    response = None
    modified_request = None

    # update listener (pre-invocation)
    for listener in listeners:
        listener_result = listener.forward_request(method=method,
                                                   path=path,
                                                   data=data,
                                                   headers=headers)
        if isinstance(listener_result, Response):
            response = listener_result
            break
        if isinstance(listener_result, LambdaResponse):
            response = listener_result
            break
        if isinstance(listener_result, dict):
            # a dict result is serialized into a 200 JSON response
            response = Response()
            response._content = json.dumps(json_safe(listener_result))
            response.headers['Content-Type'] = APPLICATION_JSON
            response.status_code = 200
            break
        elif isinstance(listener_result, Request):
            # listener rewrote the request - use its data/headers downstream
            modified_request = listener_result
            data = modified_request.data
            headers = modified_request.headers
            break
        elif http2_server.get_async_generator_result(listener_result):
            return listener_result
        elif listener_result is not True:
            # get status code from response, or use Bad Gateway status code
            code = listener_result if isinstance(listener_result, int) else 503
            response = Response()
            response.status_code = code
            response._content = ''
            response.headers['Content-Length'] = '0'
            append_cors_headers(response)
            return response

    # perform the actual invocation of the backend service
    if response is None:
        headers['Connection'] = headers.get('Connection') or 'close'
        data_to_send = data_bytes
        request_url = proxy_url
        if modified_request:
            if modified_request.url:
                request_url = '%s%s' % (forward_base_url, modified_request.url)
            data_to_send = modified_request.data

        # make sure we drop "chunked" transfer encoding from the headers to be forwarded
        headers.pop('Transfer-Encoding', None)
        requests_method = getattr(requests, method.lower())
        response = requests_method(request_url,
                                   data=data_to_send,
                                   headers=headers,
                                   stream=True,
                                   verify=False)

    # prevent requests from processing response body (e.g., to pass-through gzip encoded content unmodified)
    pass_raw = ((hasattr(response, '_content_consumed')
                 and not response._content_consumed)
                or response.headers.get('content-encoding') in ['gzip'])
    if pass_raw and getattr(response, 'raw', None):
        new_content = response.raw.read()
        if new_content:
            response._content = new_content

    # update listener (post-invocation)
    if listeners:
        update_listener = listeners[-1]
        kwargs = {
            'method': method,
            'path': path,
            'data': data_bytes,
            'headers': headers,
            'response': response
        }
        # FIX: inspect.getargspec is deprecated and was removed in Python 3.11;
        # getfullargspec()[0] yields the same positional-arg name list.
        if 'request_handler' in inspect.getfullargspec(
                update_listener.return_response)[0]:
            # some listeners (e.g., sqs_listener.py) require additional details like the original
            # request port, hence we pass in a reference to this request handler as well.
            kwargs['request_handler'] = request_handler

        updated_response = update_listener.return_response(**kwargs)
        if isinstance(updated_response, Response):
            response = updated_response

    # allow pre-flight CORS headers by default
    from localstack.services.s3.s3_listener import ProxyListenerS3
    is_s3_listener = any(
        isinstance(service_listener, ProxyListenerS3)
        for service_listener in listeners)
    if not is_s3_listener:
        append_cors_headers(response)

    return response
Beispiel #19
0
def side_effect_feed_url(mocker, client):
    """Attach a mocked feed response (status 200, fixed body) to the client."""
    mocked_response = Response()
    type(mocked_response).status_code = mocker.PropertyMock(return_value=200)
    type(mocked_response).content = mocker.PropertyMock(return_value='text_to_parse')
    client.feed_response = mocked_response
Beispiel #20
0
    def forward_request(self, method, path, data, headers):
        """Route an incoming edge request to the backing service API.

        Resolves the target API and port from headers (and custom rules as a
        fallback), patches auth/host headers, and forwards the request under a
        bootstrap lock unless persistence has been restored or the call is
        internal. Returns an int status code (200/404/503), a Response, or the
        result of do_forward_request().
        """

        # reject new requests while the infrastructure is shutting down
        if common.INFRA_STOPPED:
            return 503

        # short-circuit: forward everything to a fixed remote URL if configured
        if config.EDGE_FORWARD_URL:
            return do_forward_request_network(
                0,
                method,
                path,
                data,
                headers,
                target_url=config.EDGE_FORWARD_URL)

        # kill the process if we receive this header
        headers.get(HEADER_KILL_SIGNAL) and sys.exit(0)

        target = headers.get("x-amz-target", "")
        # inject a computed auth header only if the client sent none
        auth_header = get_auth_string(method, path, headers, data)
        if auth_header and not headers.get("authorization"):
            headers["authorization"] = auth_header
        host = headers.get("host", "")
        # record the scheme://host prefix of the original request URL for downstream listeners
        orig_req_url = headers.pop(HEADER_LOCALSTACK_REQUEST_URL, "")
        headers[HEADER_LOCALSTACK_EDGE_URL] = (re.sub(
            r"^([^:]+://[^/]+).*", r"\1", orig_req_url) or "http://%s" % host)

        # extract API details
        api, port, path, host = get_api_from_headers(headers,
                                                     method=method,
                                                     path=path,
                                                     data=data)

        set_default_region_in_headers(headers)

        # a negative port marks an explicitly unroutable request
        if port and int(port) < 0:
            return 404

        # fall back to custom routing rules when headers did not resolve a port
        if not port:
            api, port = get_api_from_custom_rules(method, path, data,
                                                  headers) or (
                                                      api,
                                                      port,
                                                  )

        should_log_trace = is_trace_logging_enabled(headers)
        if api and should_log_trace:
            # print request trace for debugging, if enabled
            LOG.debug('IN(%s): "%s %s" - headers: %s - data: %s', api, method,
                      path, dict(headers), data)

        # no port resolved: answer OPTIONS with 200, everything else with 404
        if not port:
            if method == "OPTIONS":
                if api and should_log_trace:
                    # print request trace for debugging, if enabled
                    LOG.debug('IN(%s): "%s %s" - status: %s', api, method,
                              path, 200)
                return 200

            if api in ["", None, API_UNKNOWN]:
                truncated = truncate(data)
                # only log "no rule" for requests that look like real API calls
                if auth_header or target or data or path not in [
                        "/", "/favicon.ico"
                ]:
                    LOG.info(
                        ('Unable to find forwarding rule for host "%s", path "%s %s", '
                         'target header "%s", auth header "%s", data "%s"'),
                        host,
                        method,
                        path,
                        target,
                        auth_header,
                        truncated,
                    )
            else:
                LOG.info(
                    ('Unable to determine forwarding port for API "%s" - please '
                     "make sure this API is enabled via the SERVICES configuration"
                     ),
                    api,
                )
            response = Response()
            response.status_code = 404
            response._content = '{"status": "running"}'
            return response

        # ensure an Authorization header is present (mocked AWS credentials)
        if api and not headers.get("Authorization"):
            headers["Authorization"] = aws_stack.mock_aws_request_headers(
                api)["Authorization"]
        headers[HEADER_TARGET_API] = str(api)

        headers["Host"] = host
        if isinstance(data, dict):
            data = json.dumps(data)

        # transparently decompress gzip payloads (except for APIs that handle gzip themselves)
        encoding_type = headers.get("Content-Encoding") or ""
        if encoding_type.upper() == GZIP_ENCODING.upper(
        ) and api not in SKIP_GZIP_APIS:
            # NOTE: headers supports .set(), so this is a case-insensitive header object, not a plain dict
            headers.set("Content-Encoding", IDENTITY_ENCODING)
            data = gzip.decompress(data)

        is_internal_call = is_internal_call_context(headers)

        self._require_service(api)

        # skip the bootstrap lock for internal calls or once persistence is restored
        lock_ctx = BOOTSTRAP_LOCK
        if is_internal_call or persistence.is_persistence_restored():
            lock_ctx = empty_context_manager()

        with lock_ctx:
            result = do_forward_request(api,
                                        method,
                                        path,
                                        data,
                                        headers,
                                        port=port)
            if should_log_trace and result not in [None, False, True]:
                result_status_code = getattr(result, "status_code", result)
                result_headers = getattr(result, "headers", {})
                result_content = getattr(result, "content", "")
                LOG.debug(
                    'OUT(%s): "%s %s" - status: %s - response headers: %s - response: %s',
                    api,
                    method,
                    path,
                    result_status_code,
                    dict(result_headers or {}),
                    result_content,
                )
            return result
 def listener(**kwargs):
     """Echo listener: wrap the incoming request data in a 200 JSON response."""
     resp = Response()
     resp.status_code = 200
     payload = kwargs['data']
     resp._content = '{}' if not payload else json.dumps(payload)
     return resp
Beispiel #22
0
    def forward_request(self, method, path, data, headers):
        """Dispatch an API Gateway user request to its configured integration.

        Matches /restapis/<api-id>/<stage>/<user-request>/<path> URLs, looks up
        the integration (exact path first, then path-parameter resources), and
        handles AWS (Kinesis PutRecords), AWS_PROXY (Lambda), and HTTP
        integration types. Returns a Response/result, an error Response, or
        True to forward the request unchanged.
        """
        data = data and json.loads(to_str(data))

        # Paths to match
        regex2 = r'^/restapis/([A-Za-z0-9_\-]+)/([A-Za-z0-9_\-]+)/%s/(.*)$' % PATH_USER_REQUEST

        if re.match(regex2, path):
            search_match = re.search(regex2, path)
            api_id = search_match.group(1)
            relative_path = '/%s' % search_match.group(3)
            try:
                integration = aws_stack.get_apigateway_integration(
                    api_id, method, path=relative_path)
                assert integration
            except Exception:
                # if we have no exact match, try to find an API resource that contains path parameters
                path_map = get_rest_api_paths(rest_api_id=api_id)
                try:
                    extracted_path, resource = get_resource_for_path(
                        path=relative_path, path_map=path_map)
                except Exception:
                    return make_error('Unable to find path %s' % path, 404)

                integrations = resource.get('resourceMethods', {})
                integration = integrations.get(method, {})
                integration = integration.get('methodIntegration')
                if not integration:

                    if method == 'OPTIONS' and 'Origin' in headers:
                        # default to returning CORS headers if this is an OPTIONS request
                        return get_cors_response(headers)

                    return make_error(
                        'Unable to find integration for path %s' % path, 404)

            uri = integration.get('uri')
            if method == 'POST' and integration['type'] == 'AWS':
                if uri.endswith('kinesis:action/PutRecords'):
                    # render the request template, then forward to the Kinesis API
                    template = integration['requestTemplates'][
                        APPLICATION_JSON]
                    new_request = aws_stack.render_velocity_template(
                        template, data)

                    # forward records to target kinesis stream
                    headers = aws_stack.mock_aws_request_headers(
                        service='kinesis')
                    headers[
                        'X-Amz-Target'] = kinesis_listener.ACTION_PUT_RECORDS
                    result = common.make_http_request(url=TEST_KINESIS_URL,
                                                      method='POST',
                                                      data=new_request,
                                                      headers=headers)
                    return result
                else:
                    msg = 'API Gateway action uri "%s" not yet implemented' % uri
                    LOGGER.warning(msg)
                    return make_error(msg, 404)

            elif integration['type'] == 'AWS_PROXY':
                if uri.startswith(
                        'arn:aws:apigateway:') and ':lambda:path' in uri:
                    # extract the Lambda function ARN from the integration URI
                    func_arn = uri.split(':lambda:path')[1].split(
                        'functions/')[1].split('/invocations')[0]
                    data_str = json.dumps(data) if isinstance(data,
                                                              dict) else data

                    try:
                        # NOTE: extracted_path may be unbound when the exact integration
                        # lookup above succeeded; the except clause falls back to no params
                        path_params = extract_path_params(
                            path=relative_path, extracted_path=extracted_path)
                    except Exception:
                        path_params = {}
                    result = lambda_api.process_apigateway_invocation(
                        func_arn,
                        relative_path,
                        data_str,
                        headers,
                        path_params=path_params,
                        method=method,
                        resource_path=path)

                    if isinstance(result, FlaskResponse):
                        return flask_to_requests_response(result)

                    # convert the Lambda result (dict or JSON string) into an HTTP response
                    response = Response()
                    parsed_result = result if isinstance(
                        result, dict) else json.loads(result)
                    parsed_result = common.json_safe(parsed_result)
                    response.status_code = int(
                        parsed_result.get('statusCode', 200))
                    response.headers.update(parsed_result.get('headers', {}))
                    try:
                        if isinstance(parsed_result['body'], dict):
                            response._content = json.dumps(
                                parsed_result['body'])
                        else:
                            response._content = parsed_result['body']
                    except Exception:
                        # no 'body' key in the Lambda result - default to empty JSON object
                        response._content = '{}'
                    return response
                else:
                    msg = 'API Gateway action uri "%s" not yet implemented' % uri
                    LOGGER.warning(msg)
                    return make_error(msg, 404)

            elif integration['type'] == 'HTTP':
                # plain HTTP integration: relay the request with the same verb
                function = getattr(requests, method.lower())
                if isinstance(data, dict):
                    data = json.dumps(data)
                result = function(integration['uri'],
                                  data=data,
                                  headers=headers)
                return result

            else:
                msg = (
                    'API Gateway integration type "%s" for method "%s" not yet implemented'
                    % (integration['type'], method))
                LOGGER.warning(msg)
                return make_error(msg, 404)

            # NOTE(review): unreachable - every branch above returns
            return 200

        if re.match(PATH_REGEX_AUTHORIZERS, path):
            return handle_authorizers(method, path, data, headers)

        return True
Beispiel #23
0
    def forward_request(self, method, path, data, headers):
        """Intercept S3 requests before they hit the backend.

        Strips streaming v4 chunk signatures, expands multipart upload
        placeholders, records the call for persistence, and serves the
        ?notification and ?cors sub-resources locally. Returns a Response for
        locally-handled calls, a modified Request when the body was rewritten,
        or True to forward unchanged.
        """

        modified_data = None

        # If this request contains streaming v4 authentication signatures, strip them from the message
        # Related isse: https://github.com/localstack/localstack/issues/98
        # TODO we should evaluate whether to replace moto s3 with scality/S3:
        # https://github.com/scality/S3/issues/237
        if headers.get('x-amz-content-sha256'
                       ) == 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD':
            modified_data = strip_chunk_signatures(data)

        # POST requests to S3 may include a "$(unknown)" placeholder in the
        # key, which should be replaced with an actual file name before storing.
        if method == 'POST':
            original_data = modified_data or data
            expanded_data = expand_multipart_filename(original_data, headers)
            # identity check: expand_multipart_filename returns the same object when unchanged
            if expanded_data is not original_data:
                modified_data = expanded_data

        # persist this API call to disk
        persistence.record('s3', method, path, data, headers)

        parsed = urlparse.urlparse(path)
        query = parsed.query
        path = parsed.path
        bucket = path.split('/')[1]
        query_map = urlparse.parse_qs(query)
        # handle the ?notification sub-resource locally (GET reads, PUT stores config)
        if query == 'notification' or 'notification' in query_map:
            response = Response()
            response.status_code = 200
            if method == 'GET':
                # TODO check if bucket exists
                result = '<NotificationConfiguration xmlns="%s">' % XMLNS_S3
                if bucket in S3_NOTIFICATIONS:
                    notif = S3_NOTIFICATIONS[bucket]
                    events_string = '\n'.join(
                        ['<Event>%s</Event>' % e for e in notif['Event']])
                    for dest in ['Queue', 'Topic', 'CloudFunction']:
                        if dest in notif:
                            result += ('''<{dest}Configuration>
                                        <Id>{uid}</Id>
                                        <{dest}>{endpoint}</{dest}>
                                        {events}
                                    </{dest}Configuration>''').format(
                                dest=dest,
                                uid=uuid.uuid4(),
                                endpoint=notif[dest],
                                events=events_string)
                result += '</NotificationConfiguration>'
                response._content = result

            if method == 'PUT':
                parsed = xmltodict.parse(data)
                notif_config = parsed.get('NotificationConfiguration')
                for dest in ['Queue', 'Topic', 'CloudFunction']:
                    config = notif_config.get('%sConfiguration' % (dest))
                    if config:
                        # TODO: what if we have multiple destinations - would we overwrite the config?
                        notification_details = {
                            'Id': config.get('Id'),
                            'Event': config.get('Event'),
                            dest: config.get(dest),
                            'Filter': config.get('Filter')
                        }
                        # round-trip through JSON to normalize OrderedDicts from xmltodict
                        S3_NOTIFICATIONS[bucket] = json.loads(
                            json.dumps(notification_details))

            # return response for ?notification request
            return response

        # handle the ?cors sub-resource locally
        if query == 'cors' or 'cors' in query_map:
            if method == 'GET':
                return get_cors(bucket)
            if method == 'PUT':
                return set_cors(bucket, data)
            if method == 'DELETE':
                return delete_cors(bucket)

        # body was rewritten above - forward a modified request instead
        if modified_data:
            return Request(data=modified_data, headers=headers, method=method)
        return True
Beispiel #24
0
def _create_response_object(content, code, headers):
    """Assemble a Response from a body, status code, and header mapping."""
    resp = Response()
    resp._content = content
    resp.status_code = code
    resp.headers = headers
    return resp
Beispiel #25
0
 def forward_request(self, method, path, data, headers):
     """Record the incoming payload and answer with an empty 200 response."""
     records.append(data)
     resp = Response()
     resp._content = ''
     resp.status_code = 200
     return resp
Beispiel #26
0
def make_error(message, code=400):
    """Build a JSON error response ({"message": ...}) with the given status code."""
    resp = Response()
    resp._content = json.dumps({'message': message})
    resp.status_code = code
    return resp
Beispiel #27
0
    def return_response(self, method, path, data, headers, response,
                        request_handler):
        """Post-process SQS responses from the ElasticMQ backend.

        Fires analytics events for queue create/delete, rewrites queue URLs in
        the XML payload to the external hostname/port (and https when SSL is
        enabled), and mocks the TagQueue/ListQueueTags actions that ElasticMQ
        does not implement. Returns a patched Response, or None to keep the
        backend response unchanged.
        """

        if method == 'POST' and path == '/':
            # SQS query-protocol requests carry the action in form-encoded data
            req_data = urlparse.parse_qs(to_str(data))
            action = req_data.get('Action', [None])[0]
            event_type = None
            queue_url = None
            if action == 'CreateQueue':
                event_type = event_publisher.EVENT_SQS_CREATE_QUEUE
                response_data = xmltodict.parse(response.content)
                if 'CreateQueueResponse' in response_data:
                    queue_url = response_data['CreateQueueResponse'][
                        'CreateQueueResult']['QueueUrl']
            elif action == 'DeleteQueue':
                event_type = event_publisher.EVENT_SQS_DELETE_QUEUE
                queue_url = req_data.get('QueueUrl', [None])[0]

            if event_type and queue_url:
                # publish only a hash of the queue URL (anonymized analytics)
                event_publisher.fire_event(
                    event_type,
                    payload={'u': event_publisher.get_hash(queue_url)})

            # patch the response and return the correct endpoint URLs
            if action in ('CreateQueue', 'GetQueueUrl', 'ListQueues'):
                content_str = content_str_original = to_str(response.content)
                new_response = Response()
                new_response.status_code = response.status_code
                new_response.headers = response.headers
                if config.USE_SSL and '<QueueUrl>http://' in content_str:
                    # return https://... if we're supposed to use SSL
                    content_str = re.sub(r'<QueueUrl>\s*http://',
                                         r'<QueueUrl>https://', content_str)
                # expose external hostname:port
                external_port = get_external_port(headers, request_handler)
                content_str = re.sub(
                    r'<QueueUrl>\s*([a-z]+)://[^<]*:([0-9]+)/([^<]*)\s*</QueueUrl>',
                    r'<QueueUrl>\1://%s:%s/\3</QueueUrl>' %
                    (HOSTNAME_EXTERNAL, external_port), content_str)
                new_response._content = content_str
                if content_str_original != new_response._content:
                    # if changes have been made, return patched response
                    # NOTE(review): the header value is set to an int, not a str -
                    # confirm downstream header serialization tolerates this
                    new_response.headers['content-length'] = len(
                        new_response._content)
                    return new_response

            # Since the following 2 API calls are not implemented in ElasticMQ, we're mocking them
            # and letting them to return an empty response
            if action == 'TagQueue':
                new_response = Response()
                new_response.status_code = 200
                new_response._content = (
                    '<?xml version="1.0"?>'
                    '<TagQueueResponse>'
                    '<ResponseMetadata>'  # noqa: W291
                    '<RequestId>{}</RequestId>'  # noqa: W291
                    '</ResponseMetadata>'  # noqa: W291
                    '</TagQueueResponse>').format(uuid.uuid4())
                return new_response
            elif action == 'ListQueueTags':
                new_response = Response()
                new_response.status_code = 200
                new_response._content = (
                    '<?xml version="1.0"?>'
                    '<ListQueueTagsResponse xmlns="{}">'
                    '<ListQueueTagsResult/>'  # noqa: W291
                    '<ResponseMetadata>'  # noqa: W291
                    '<RequestId>{}</RequestId>'  # noqa: W291
                    '</ResponseMetadata>'  # noqa: W291
                    '</ListQueueTagsResponse>').format(XMLNS_SQS, uuid.uuid4())
                return new_response
Beispiel #28
0
def modify_and_forward(method=None,
                       path=None,
                       data_bytes=None,
                       headers=None,
                       forward_base_url=None,
                       listeners=None,
                       request_handler=None,
                       client_address=None,
                       server_address=None):
    """ Coordinate an incoming request with the proxy listeners and forward it
        to the backend service.

        :param method: HTTP verb of the incoming request
        :param path: request path; may be a full URL, which is reduced to its path
        :param data_bytes: raw request body forwarded to the backend
        :param headers: request headers (mutated in place: Host, X-Forwarded-For, ...)
        :param forward_base_url: base URL of the backend service
        :param listeners: optional list of ProxyListener instances
        :param request_handler: handler reference, passed to listeners that accept it
        :param client_address: client address used to build X-Forwarded-For
        :param server_address: server address used to build X-Forwarded-For
        :return: the final Response, possibly replaced/patched by a listener
    """
    listeners = [lis for lis in (listeners or []) if lis]
    data = data_bytes

    def is_full_url(url):
        # True when the string carries a scheme, e.g. "http://host/..."
        return re.match(r'[a-zA-Z]+://.+', url)

    # reduce a full URL down to its path component
    if is_full_url(path):
        path = path.split('://', 1)[1]
        path = '/%s' % (path.split('/', 1)[1] if '/' in path else '')
    proxy_url = '%s%s' % (forward_base_url, path)

    # allow any listener to override the URL we forward to
    for listener in listeners:
        proxy_url = listener.get_forward_url(method, path, data,
                                             headers) or proxy_url

    target_url = path
    if not is_full_url(target_url):
        target_url = '%s%s' % (forward_base_url, target_url)

    # update original "Host" header (moto s3 relies on this behavior)
    if not headers.get('Host'):
        headers['host'] = urlparse(target_url).netloc
    # FIX: guard against None - with a plain dict the 'Host' key may be absent
    # (only lowercase 'host' was set above), and `in None` raises TypeError
    if 'localhost.atlassian.io' in (headers.get('Host') or ''):
        headers['host'] = 'localhost'
    headers['X-Forwarded-For'] = build_x_forwarded_for(headers, client_address,
                                                       server_address)

    response = None
    modified_request = None

    # update listener (pre-invocation)
    for listener in listeners:
        listener_result = listener.forward_request(method=method,
                                                   path=path,
                                                   data=data,
                                                   headers=headers)
        if isinstance(listener_result, Response):
            response = listener_result
            break
        if isinstance(listener_result, LambdaResponse):
            response = listener_result
            break
        if isinstance(listener_result, dict):
            # a dict result is serialized into a 200 JSON response
            response = Response()
            response._content = json.dumps(json_safe(listener_result))
            response.headers['Content-Type'] = APPLICATION_JSON
            response.status_code = 200
            break
        elif isinstance(listener_result, Request):
            # listener rewrote the request - use its data/headers downstream
            modified_request = listener_result
            data = modified_request.data
            headers = modified_request.headers
            break
        elif listener_result is not True:
            # get status code from response, or use Bad Gateway status code
            code = listener_result if isinstance(listener_result, int) else 503
            response = Response()
            response._content = ''
            # TODO add CORS headers here?
            response.headers['Content-Length'] = '0'
            response.status_code = code
            return response

    # perform the actual invocation of the backend service
    if response is None:
        headers['Connection'] = headers.get('Connection') or 'close'
        data_to_send = data_bytes
        request_url = proxy_url
        if modified_request:
            if modified_request.url:
                request_url = '%s%s' % (forward_base_url, modified_request.url)
            data_to_send = modified_request.data

        requests_method = getattr(requests, method.lower())
        response = requests_method(request_url,
                                   data=data_to_send,
                                   headers=headers,
                                   stream=True)

        # prevent requests from processing response body
        if not response._content_consumed and response.raw:
            response._content = response.raw.read()

    # update listener (post-invocation)
    if listeners:
        update_listener = listeners[-1]
        kwargs = {
            'method': method,
            'path': path,
            'data': data_bytes,
            'headers': headers,
            'response': response
        }
        # FIX: inspect.getargspec is deprecated and was removed in Python 3.11;
        # getfullargspec()[0] yields the same positional-arg name list.
        if 'request_handler' in inspect.getfullargspec(
                update_listener.return_response)[0]:
            # some listeners (e.g., sqs_listener.py) require additional details like the original
            # request port, hence we pass in a reference to this request handler as well.
            kwargs['request_handler'] = request_handler
        updated_response = update_listener.return_response(**kwargs)
        if isinstance(updated_response, Response):
            response = updated_response

    return response
Beispiel #29
0
    def return_response(self, method, path, data, headers, response):
        action = headers.get('X-Amz-Target', '').split('.')[-1]
        data, encoding_type = self.decode_content(data or '{}', True)
        # Tail of the Kinesis response handler (the enclosing method's header
        # is above this chunk): rewrites the backend response per API action
        # and forwards record payloads to subscribed Lambda functions.
        response._content = self.replace_in_encoded(response.content or '')
        records = []
        if action in ('CreateStream', 'DeleteStream'):
            # Fire an analytics event; the stream name is hashed so only an
            # anonymized identifier is published.
            event_type = (event_publisher.EVENT_KINESIS_CREATE_STREAM
                          if action == 'CreateStream' else
                          event_publisher.EVENT_KINESIS_DELETE_STREAM)
            payload = {'n': event_publisher.get_hash(data.get('StreamName'))}
            if action == 'CreateStream':
                payload['s'] = data.get('ShardCount')
            event_publisher.fire_event(event_type, payload=payload)
        elif action == 'PutRecord':
            response_body = self.decode_content(response.content)
            # Note: avoid adding 'encryptionType':'NONE' in the event_record, as this breaks .NET Lambdas
            event_record = {
                'approximateArrivalTimestamp': epoch_timestamp(),
                'data': data['Data'],
                'partitionKey': data['PartitionKey'],
                # sequence number is assigned by the backend, so it is taken
                # from the response rather than the request
                'sequenceNumber': response_body.get('SequenceNumber')
            }
            event_records = [event_record]
            stream_name = data['StreamName']
            # Deliver the record to any Lambdas subscribed to this stream.
            lambda_api.process_kinesis_records(event_records, stream_name)
        elif action == 'PutRecords':
            event_records = []
            response_body = self.decode_content(response.content)
            if 'Records' in response_body:
                response_records = response_body['Records']
                records = data['Records']
                # Pair each request record with its backend-assigned sequence
                # number by index (request and response arrays are parallel).
                for index in range(0, len(records)):
                    record = records[index]
                    # Note: avoid adding 'encryptionType':'NONE' in the event_record, as this breaks .NET Lambdas
                    event_record = {
                        'approximateArrivalTimestamp':
                        epoch_timestamp(),
                        'data':
                        record['Data'],
                        'partitionKey':
                        record['PartitionKey'],
                        'sequenceNumber':
                        response_records[index].get('SequenceNumber')
                    }
                    event_records.append(event_record)
                stream_name = data['StreamName']
                lambda_api.process_kinesis_records(event_records, stream_name)
        elif action == 'UpdateShardCount':
            # Currently kinesalite, which backs the Kinesis implementation for localstack, does
            # not support UpdateShardCount:
            # https://github.com/mhart/kinesalite/issues/61
            #
            # [Terraform](https://www.terraform.io) makes the call to UpdateShardCount when it
            # applies Kinesis resources. A Terraform run fails when this is not present.
            #
            # The code that follows just returns a successful response, bypassing the 400
            # response that kinesalite returns.
            #
            response = Response()
            response.status_code = 200
            content = {
                'CurrentShardCount': 1,
                'StreamName': data['StreamName'],
                'TargetShardCount': data['TargetShardCount']
            }
            response.encoding = 'UTF-8'
            # NOTE(review): json.dumps returns str while requests' _content is
            # normally bytes — confirm downstream callers tolerate str here.
            response._content = json.dumps(content)
            return response
        elif action == 'GetRecords':
            # presumably v2 AWS SDKs require integer (not float) arrival
            # timestamps — verify against sdk_is_v2's callers
            sdk_v2 = self.sdk_is_v2(
                headers.get('User-Agent', '').split(' ')[0])
            results, encoding_type = self.decode_content(
                response.content, True)

            records = results.get('Records', [])
            if not records:
                # nothing to rewrite; pass the backend response through
                return response

            for record in records:
                if sdk_v2:
                    record['ApproximateArrivalTimestamp'] = int(
                        record['ApproximateArrivalTimestamp'])
                if not isinstance(record['Data'], str):
                    # Remove double quotes from data written as bytes
                    # https://github.com/localstack/localstack/issues/3588
                    tmp = bytearray(record['Data']['data'])
                    # b'"'[0] is the ASCII code of '"' (34); strip a single
                    # pair of surrounding quote bytes
                    if len(tmp) >= 2 and tmp[0] == tmp[-1] == b'"'[0]:
                        tmp = tmp[1:-1]

                    if encoding_type == APPLICATION_JSON:
                        # JSON responses carry record data base64-encoded
                        record['Data'] = to_str(base64.b64encode(tmp))
                    else:
                        record['Data'] = to_str(tmp)

                else:
                    # string payloads are base64: decode, strip quotes,
                    # re-encode
                    tmp = base64.b64decode(record['Data'])
                    if len(tmp) >= 2 and tmp[0] == tmp[-1] == b'"'[0]:
                        tmp = tmp[1:-1]
                    record['Data'] = to_str(base64.b64encode(tmp))

            # Re-serialize in whichever wire format the client requested
            # (CBOR for the Java/v2 SDK wire protocol, JSON otherwise).
            response._content = cbor2.dumps(
                results) if encoding_type == APPLICATION_CBOR else json.dumps(
                    results)
            return response
# Beispiel #30
 def __init__(self, *args, **kwargs):
     """Set up a per-instance logger and a fresh HTTP response object.

     The logger is named after the class-level ``name`` attribute and
     writes to the console only (``file=False``); extra positional and
     keyword arguments are accepted but ignored.
     """
     # Independent initializations; response first, then the named logger.
     self.response = Response()
     self.log = LogHandler(self.name, file=False)