def return_response(self, method, path, data, headers, response):
    """Post-process API Gateway responses from the backend.

    Fills gaps in the backend: serves an empty documentation-versions
    listing, dispatches endpoints the backend 404s on to local handlers,
    records the region of newly created REST APIs for faster lookup, and
    fires analytics events on API creation/deletion.
    """
    # fix backend issue (missing support for API documentation)
    if re.match(r"/restapis/[^/]+/documentation/versions", path) and (
        response.status_code == 404
    ):
        return requests_response({"position": "1", "items": []})

    # add missing implementations: route 404s to our local handlers
    if response.status_code == 404:
        parsed = data and json.loads(to_str(data))
        handler = None
        if path == "/account":
            handler = handle_accounts
        elif path.startswith("/vpclinks"):
            handler = handle_vpc_links
        elif re.match(PATH_REGEX_PATH_MAPPINGS, path):
            handler = handle_base_path_mappings
        elif re.match(PATH_REGEX_CLIENT_CERTS, path):
            handler = handle_client_certificates
        outcome = handler(method, path, parsed, headers) if handler else None
        if outcome is not None:
            response.status_code = 200
            aws_responses.set_response_content(
                response, outcome, getattr(outcome, "headers", {})
            )

    if method == "POST" and path == "/restapis":
        created = json.loads(to_str(response.content))
        # keep track of API regions for faster lookup later on
        API_REGIONS[created["id"]] = aws_stack.extract_region_from_auth_header(headers)
        # publish creation event
        event_publisher.fire_event(
            event_publisher.EVENT_APIGW_CREATE_API,
            payload={"a": event_publisher.get_hash(created["id"])},
        )

    delete_pattern = r"^/restapis/([a-zA-Z0-9\-]+)$"
    if method == "DELETE" and re.match(delete_pattern, path):
        removed_id = re.sub(delete_pattern, r"\1", path)
        event_publisher.fire_event(
            event_publisher.EVENT_APIGW_DELETE_API,
            payload={"a": event_publisher.get_hash(removed_id)},
        )
def return_response(self, method, path, data, headers, response):
    """Post-process API Gateway responses (trimmed-down variant)."""
    # TODO: clean up logic below!

    # fix backend issue (missing support for API documentation):
    # answer documentation-versions lookups with an empty listing
    is_doc_versions = re.match(r"/restapis/[^/]+/documentation/versions", path)
    if is_doc_versions and response.status_code == 404:
        return requests_response({"position": "1", "items": []})

    # keep track of API regions for faster lookup later on
    # TODO - to be removed - see comment for API_REGIONS variable
    if path == "/restapis" and method == "POST":
        created_api = json.loads(to_str(response.content))
        region = aws_stack.extract_region_from_auth_header(headers)
        API_REGIONS[created_api["id"]] = region
def get_raw_metrics(self, request: Request):
    """Return all raw metric datapoints stored for the request's region.

    The region is derived from the Authorization header; a region with no
    CloudWatch backend yields an empty metric list.
    """
    region_name = aws_stack.extract_region_from_auth_header(request.headers)
    backend = cloudwatch_backends.get(region_name)
    stored = backend.metric_data if backend else []

    def _dims(metric):
        # flatten dimension objects into name/value pairs
        return [{"n": dim.name, "v": dim.value} for dim in metric.dimensions]

    metrics = [
        {
            "ns": m.namespace,
            "n": m.name,
            "v": m.value,
            "t": m.timestamp,
            "d": _dims(m),
        }
        for m in stored
    ]
    return {"metrics": metrics}
def forward_request(self, method, path, data, headers):
    """Intercept SQS requests before they reach the backend.

    SendMessage requests are offered to any Lambda listening on the target
    queue; if one consumed the message, a synthetic success response is
    returned and the message never hits the queue. Requests carrying a
    QueueName are re-encoded and forwarded explicitly. Everything else
    passes through unchanged (return True).
    """
    params = self.parse_request_data(method, path, data)
    if not params:
        return True

    if params.get('Action', [None])[0] == 'SendMessage':
        queue_url = params.get('QueueUrl', [path.partition('?')[0]])[0]
        queue_name = queue_url[queue_url.rindex('/') + 1:]
        body = params.get('MessageBody', [None])[0]
        attributes = self.format_message_attributes(params)
        region = extract_region_from_auth_header(headers)
        consumed = lambda_api.process_sqs_message(
            body, attributes, queue_name, region_name=region)
        if consumed:
            # A Lambda was listening - short-circuit with a success
            # response so the message is not also added to the queue.
            reply = Response()
            reply.status_code = 200
            reply._content = SUCCESSFUL_SEND_MESSAGE_XML_TEMPLATE.format(
                message_attr_hash=md5(data),
                message_body_hash=md5(body),
                message_id=str(uuid.uuid4()))
            return reply

    if 'QueueName' in params:
        query = urlencode(params, doseq=True)
        # GET: re-attach the (possibly modified) params as a query string;
        # POST: send them as the form body instead.
        target_url = None
        payload = ''
        if method == 'GET':
            target_url = '%s?%s' % (path.partition('?')[0], query)
        elif method == 'POST':
            payload = query
        return Request(data=payload, url=target_url, headers=headers, method=method)

    return True
def _send_message(self, path, data, req_data, headers):
    """Offer a SendMessage request to Lambda listeners.

    Returns a synthetic success response when a Lambda consumed the
    message (so it is not also enqueued), or None to let the request
    proceed to the backend.
    """
    queue_url = self._queue_url(path, req_data, headers)
    queue_name = queue_url[queue_url.rindex('/') + 1:]
    body = req_data.get('MessageBody', [None])[0]
    attributes = self.format_message_attributes(req_data)
    region = extract_region_from_auth_header(headers)

    handled = lambda_api.process_sqs_message(
        body, attributes, queue_name, region_name=region)
    if not handled:
        return None

    # A Lambda was listening - do not add the message to the queue.
    reply = Response()
    reply.status_code = 200
    reply._content = SUCCESSFUL_SEND_MESSAGE_XML_TEMPLATE.format(
        message_attr_hash=md5(data),
        message_body_hash=md5(body),
        message_id=str(uuid.uuid4()))
    return reply
def post_request():
    """Dispatch an incoming Firehose API call to its local implementation.

    The operation is read from the 'x-amz-target' header
    ('<prefix>.<Action>') and the payload from the JSON request body.
    Returns a Flask response (JSON-ified dict, or an error response for
    unknown actions / missing streams).
    """
    action = request.headers.get('x-amz-target')
    data = json.loads(to_str(request.data))
    response = None
    if action == '%s.ListDeliveryStreams' % ACTION_HEADER_PREFIX:
        response = {
            'DeliveryStreamNames': get_delivery_stream_names(),
            'HasMoreDeliveryStreams': False
        }
    elif action == '%s.CreateDeliveryStream' % ACTION_HEADER_PREFIX:
        stream_name = data['DeliveryStreamName']
        # region determined from the request's auth header
        region_name = extract_region_from_auth_header(request.headers)
        response = create_stream(
            stream_name,
            delivery_stream_type=data.get('DeliveryStreamType'),
            delivery_stream_type_configuration=data.get(
                'KinesisStreamSourceConfiguration'),
            s3_destination=data.get('S3DestinationConfiguration'),
            elasticsearch_destination=data.get(
                'ElasticsearchDestinationConfiguration'),
            tags=data.get('Tags'),
            region_name=region_name)
    elif action == '%s.DeleteDeliveryStream' % ACTION_HEADER_PREFIX:
        stream_name = data['DeliveryStreamName']
        response = delete_stream(stream_name)
    elif action == '%s.DescribeDeliveryStream' % ACTION_HEADER_PREFIX:
        stream_name = data['DeliveryStreamName']
        response = get_stream(stream_name)
        if not response:
            return error_not_found(stream_name)
        response = {'DeliveryStreamDescription': response}
    elif action == '%s.PutRecord' % ACTION_HEADER_PREFIX:
        stream_name = data['DeliveryStreamName']
        record = data['Record']
        put_record(stream_name, record)
        response = {'RecordId': str(uuid.uuid4())}
    elif action == '%s.PutRecordBatch' % ACTION_HEADER_PREFIX:
        stream_name = data['DeliveryStreamName']
        records = data['Records']
        put_records(stream_name, records)
        # fix: the Firehose API requires one entry per submitted record in
        # RequestResponses (previously an empty list was returned)
        request_responses = [{'RecordId': str(uuid.uuid4())} for _ in records]
        response = {'FailedPutCount': 0, 'RequestResponses': request_responses}
    elif action == '%s.UpdateDestination' % ACTION_HEADER_PREFIX:
        stream_name = data['DeliveryStreamName']
        version_id = data['CurrentDeliveryStreamVersionId']
        destination_id = data['DestinationId']
        # .get() replaces the verbose membership-test conditional
        s3_update = data.get('S3DestinationUpdate')
        update_destination(stream_name=stream_name, destination_id=destination_id,
            s3_update=s3_update, version_id=version_id)
        es_update = data.get('ESDestinationUpdate')
        # NOTE(review): both updates are applied unconditionally, even when the
        # corresponding payload is None - preserved as-is; verify that
        # update_destination treats a None update as a no-op
        update_destination(stream_name=stream_name, destination_id=destination_id,
            es_update=es_update, version_id=version_id)
        response = {}
    elif action == '%s.ListTagsForDeliveryStream' % ACTION_HEADER_PREFIX:
        response = get_delivery_stream_tags(data['DeliveryStreamName'],
            data.get('ExclusiveStartTagKey'), data.get('Limit', 50))
    else:
        response = error_response('Unknown action "%s"' % action,
            code=400, error_type='InvalidAction')
    if isinstance(response, dict):
        response = jsonify(response)
    return response
def return_response(self, method, path, data, headers, response, request_handler):
    """Post-process SQS responses from the ElasticMQ backend.

    Patches queue URLs and ARNs to reflect the external endpoint/region,
    injects missing queue attributes, fires analytics events, and mocks
    the TagQueue / ListQueueTags operations ElasticMQ does not implement.
    Returns a patched Response, a mock Response, 200 (CORS preflight),
    or None/the original response to pass through unchanged.
    """
    if method == 'OPTIONS' and path == '/':
        # Allow CORS preflight requests to succeed.
        return 200

    if method != 'POST':
        return

    if response.status_code >= 400:
        return response

    region_name = extract_region_from_auth_header(headers)
    req_data = urlparse.parse_qs(to_str(data))
    action = req_data.get('Action', [None])[0]
    content_str = content_str_original = to_str(response.content)

    self._fire_event(req_data, response)

    # patch the response and add missing attributes
    if action == 'GetQueueAttributes':
        content_str = self._add_queue_attributes(path, req_data, content_str, headers)

    # patch the response and return the correct endpoint URLs / ARNs
    if action in ('CreateQueue', 'GetQueueUrl', 'ListQueues', 'GetQueueAttributes'):
        if config.USE_SSL and '<QueueUrl>http://' in content_str:
            # return https://... if we're supposed to use SSL
            content_str = re.sub(r'<QueueUrl>\s*http://',
                r'<QueueUrl>https://', content_str)
        # expose external hostname:port
        external_port = SQS_PORT_EXTERNAL or get_external_port(headers, request_handler)
        content_str = re.sub(
            r'<QueueUrl>\s*([a-z]+)://[^<]*:([0-9]+)/([^<]*)\s*</QueueUrl>',
            r'<QueueUrl>\1://%s:%s/\3</QueueUrl>' % (HOSTNAME_EXTERNAL, external_port),
            content_str)
        # fix queue ARN - rewrite the placeholder 'elasticmq' region
        content_str = re.sub(
            r'<([a-zA-Z0-9]+)>\s*arn:aws:sqs:elasticmq:([^<]+)</([a-zA-Z0-9]+)>',
            r'<\1>arn:aws:sqs:%s:\2</\3>' % (region_name),
            content_str)

    if content_str_original != content_str:
        # if changes have been made, return a patched response
        new_response = Response()
        new_response.status_code = response.status_code
        new_response.headers = response.headers
        new_response._content = content_str
        # fix: the recomputed Content-Length must be a string - an int header
        # value breaks HTTP serialization downstream. (len() counts characters;
        # assumes the patched body is ASCII-only XML - TODO confirm.)
        new_response.headers['content-length'] = str(len(new_response._content))
        return new_response

    # Since the following 2 API calls are not implemented in ElasticMQ,
    # we're mocking them and letting them return an empty response
    if action == 'TagQueue':
        new_response = Response()
        new_response.status_code = 200
        new_response._content = ("""
            <?xml version="1.0"?>
            <TagQueueResponse>
                <ResponseMetadata>
                    <RequestId>{}</RequestId>
                </ResponseMetadata>
            </TagQueueResponse>
        """).strip().format(uuid.uuid4())
        return new_response
    elif action == 'ListQueueTags':
        new_response = Response()
        new_response.status_code = 200
        new_response._content = ("""
            <?xml version="1.0"?>
            <ListQueueTagsResponse xmlns="{}">
                <ListQueueTagsResult/>
                <ResponseMetadata>
                    <RequestId>{}</RequestId>
                </ResponseMetadata>
            </ListQueueTagsResponse>
        """).strip().format(XMLNS_SQS, uuid.uuid4())
        return new_response
def forward_request(self, method, path, data, headers):
    """Pre-process CloudFormation requests before forwarding to the backend.

    Rewrites test-account IDs in ARNs to moto's account IDs, fires
    analytics events for stack creation, tears down local resources on
    stack deletion, works around moto's handling of DescribeStackEvents,
    and rewrites template URLs/bodies for Create/Update/CreateChangeSet.
    Returns 200 (CORS preflight), a Response, a modified Request, or True
    to forward the original request unchanged.
    """
    if method == 'OPTIONS':
        # allow CORS preflight requests to succeed
        return 200
    data = data or ''
    data_orig = data
    # replace the local test account ID with moto's account ID in ARNs;
    # stack/changeSet ARNs are URL-encoded, hence the %3A colon delimiters
    data = aws_stack.fix_account_id_in_arns(data,
        existing='%3A{}%3Astack/'.format(TEST_AWS_ACCOUNT_ID),
        replace='%3A{}%3Astack/'.format(MOTO_CLOUDFORMATION_ACCOUNT_ID),
        colon_delimiter='')
    data = aws_stack.fix_account_id_in_arns(data,
        existing='%3A{}%3AchangeSet/'.format(TEST_AWS_ACCOUNT_ID),
        replace='%3A{}%3AchangeSet/'.format(MOTO_CLOUDFORMATION_ACCOUNT_ID),
        colon_delimiter='')
    data = aws_stack.fix_account_id_in_arns(data,
        existing=TEST_AWS_ACCOUNT_ID, replace=MOTO_ACCOUNT_ID,
        colon_delimiter='%3A')
    req_data = None
    if method == 'POST' and path == '/':
        # form-encoded CloudFormation API call; flatten parse_qs lists
        req_data = urlparse.parse_qs(to_str(data))
        req_data = dict([(k, v[0]) for k, v in req_data.items()])
        action = req_data.get('Action')
        stack_name = req_data.get('StackName')
        if action == 'CreateStack':
            event_publisher.fire_event(
                event_publisher.EVENT_CLOUDFORMATION_CREATE_STACK,
                payload={'n': event_publisher.get_hash(stack_name)}
            )
        if action == 'DeleteStack':
            # tear down the stack's locally deployed resources before the
            # backend forgets about them
            client = aws_stack.connect_to_service(
                'cloudformation',
                region_name=aws_stack.extract_region_from_auth_header(headers)
            )
            stack_resources = client.list_stack_resources(
                StackName=stack_name)['StackResourceSummaries']
            template_deployer.delete_stack(stack_name, stack_resources)
        if action == 'DescribeStackEvents':
            # fix an issue where moto cannot handle ARNs as stack names (or missing names)
            run_fix = not stack_name
            if stack_name:
                if stack_name.startswith('arn:aws:cloudformation'):
                    run_fix = True
                    # extract the plain stack name from the ARN
                    pattern = r'arn:aws:cloudformation:[^:]+:[^:]+:stack/([^/]+)(/.+)?'
                    stack_name = re.sub(pattern, r'\1', stack_name)
            if run_fix:
                # no name given: collect events across all known stacks
                stack_names = [stack_name] if stack_name else self._list_stack_names()
                client = aws_stack.connect_to_service('cloudformation')
                events = []
                for stack_name in stack_names:
                    # keep only the most recent event per stack
                    tmp = client.describe_stack_events(
                        StackName=stack_name)['StackEvents'][:1]
                    events.extend(tmp)
                events = [{'member': e} for e in events]
                response_content = '<StackEvents>%s</StackEvents>' % obj_to_xml(events)
                return make_response('DescribeStackEvents', response_content)
    if req_data:
        # NOTE: req_data is only truthy when the POST-'/' branch above ran,
        # so 'action' is guaranteed to be bound here
        if action == 'ValidateTemplate':
            return validate_template(req_data)
        if action in ['CreateStack', 'UpdateStack', 'CreateChangeSet']:
            # rewrite real S3 template URLs to the local S3 endpoint
            do_replace_url = is_real_s3_url(req_data.get('TemplateURL'))
            if do_replace_url:
                req_data['TemplateURL'] = convert_s3_to_local_url(req_data['TemplateURL'])
            url = req_data.get('TemplateURL', '')
            is_custom_local_endpoint = is_local_service_url(url) and '://localhost:' not in url
            # inline the (possibly transformed) template body into the request
            modified_template_body = transform_template(req_data)
            if not modified_template_body and is_custom_local_endpoint:
                modified_template_body = get_template_body(req_data)
            if modified_template_body:
                req_data.pop('TemplateURL', None)
                req_data['TemplateBody'] = modified_template_body
            if modified_template_body or do_replace_url:
                data = urlparse.urlencode(req_data, doseq=True)
                return Request(data=data, headers=headers, method=method)
        if data != data_orig or action in ['DescribeChangeSet', 'ExecuteChangeSet']:
            # account-ID rewrites changed the payload - forward the patched data
            return Request(data=urlparse.urlencode(req_data, doseq=True),
                headers=headers, method=method)
    # forward the original request unchanged
    return True
def post_request():
    """Dispatch an incoming Firehose API call to its local implementation.

    The action is the last dot-separated component of the 'x-amz-target'
    header; the payload is the JSON request body. Returns a Flask response
    (JSON-ified dict, or an error response for unknown actions / missing
    streams).
    """
    action = request.headers.get("x-amz-target", "")
    action = action.split(".")[-1]
    data = json.loads(to_str(request.data))
    response = None
    if action == "ListDeliveryStreams":
        response = {
            "DeliveryStreamNames": get_delivery_stream_names(),
            "HasMoreDeliveryStreams": False,
        }
    elif action == "CreateDeliveryStream":
        stream_name = data["DeliveryStreamName"]
        # region determined from the request's auth header
        region_name = extract_region_from_auth_header(request.headers)
        # plain and extended S3 configurations are handled uniformly
        _s3_destination = data.get("S3DestinationConfiguration") or data.get(
            "ExtendedS3DestinationConfiguration")
        response = create_stream(
            stream_name,
            delivery_stream_type=data.get("DeliveryStreamType"),
            delivery_stream_type_configuration=data.get(
                "KinesisStreamSourceConfiguration"),
            s3_destination=_s3_destination,
            elasticsearch_destination=data.get(
                "ElasticsearchDestinationConfiguration"),
            http_destination=data.get("HttpEndpointDestinationConfiguration"),
            tags=data.get("Tags"),
            region_name=region_name,
        )
    elif action == "DeleteDeliveryStream":
        stream_name = data["DeliveryStreamName"]
        response = delete_stream(stream_name)
    elif action == "DescribeDeliveryStream":
        stream_name = data["DeliveryStreamName"]
        response = get_stream(stream_name, format_s3_dest=True)
        if not response:
            return error_not_found(stream_name)
        response = {"DeliveryStreamDescription": response}
    elif action == "PutRecord":
        stream_name = data["DeliveryStreamName"]
        record = data["Record"]
        response = put_record(stream_name, record)
    elif action == "PutRecordBatch":
        stream_name = data["DeliveryStreamName"]
        records = data["Records"]
        put_records(stream_name, records)
        # one RecordId per submitted record, as the API requires
        request_responses = [{"RecordId": str(uuid.uuid4())} for _ in records]
        response = {"FailedPutCount": 0, "RequestResponses": request_responses}
    elif action == "UpdateDestination":
        stream_name = data["DeliveryStreamName"]
        version_id = data["CurrentDeliveryStreamVersionId"]
        destination_id = data["DestinationId"]
        # .get() replaces the verbose membership-test conditional
        s3_update = data.get("S3DestinationUpdate")
        update_destination(
            stream_name=stream_name,
            destination_id=destination_id,
            s3_update=s3_update,
            version_id=version_id,
        )
        es_update = data.get("ESDestinationUpdate")
        update_destination(
            stream_name=stream_name,
            destination_id=destination_id,
            elasticsearch_update=es_update,
            version_id=version_id,
        )
        http_update = data.get("HttpEndpointDestinationUpdate")
        update_destination(
            stream_name=stream_name,
            destination_id=destination_id,
            http_update=http_update,
            version_id=version_id,
        )
        response = {}
    elif action == "ListTagsForDeliveryStream":
        response = get_delivery_stream_tags(
            data["DeliveryStreamName"],
            data.get("ExclusiveStartTagKey"),
            data.get("Limit", 50),
        )
    else:
        response = error_response(
            'Unknown action "%s"' % action, code=400, error_type="InvalidAction")
    if isinstance(response, dict):
        response = jsonify(response)
    return response