def get_encryption(bucket_name):
    """Return the server-side encryption configuration of the given bucket.

    TODO: currently a stub that always answers with an empty configuration.
    """
    config = {'ServerSideEncryptionConfiguration': {}}
    return requests_response(xmltodict.unparse(config))
def forward_request(self, method, path, data, headers):
    """Intercept SQS requests before they are forwarded to the backend."""
    # let CORS preflight requests succeed immediately
    if method == 'OPTIONS':
        return 200
    params = parse_request_data(method, path, data)
    if params:
        action = params.get('Action', [None])[0]
        if action == 'SetQueueAttributes':
            queue_url = _queue_url(path, params, headers)
            supported = _set_queue_attributes(queue_url, params)
            if len(params) != len(supported):
                # make sure we only forward the supported attributes to the backend
                return _get_attributes_forward_request(method, path, headers, params, supported)
        elif action == 'DeleteQueue':
            # drop any attributes we cached for the deleted queue
            QUEUE_ATTRIBUTES.pop(_queue_url(path, params, headers), None)
        elif action == 'ListDeadLetterSourceQueues':
            queue_url = _queue_url(path, params, headers)
            content = _list_dead_letter_source_queues(QUEUE_ATTRIBUTES, queue_url)
            return requests_response(content, headers={'content-type': 'application/xhtml+xml'})
        if 'QueueName' in params:
            # re-encode the (possibly modified) parameters into the outgoing request
            encoded = urlencode(params, doseq=True) if method == 'POST' else ''
            new_url = None
            if method == 'GET':
                new_url = '%s?%s' % (path.partition('?')[0], urlencode(params, doseq=True))
            return Request(data=encoded, url=new_url, headers=headers, method=method)
    return True
def dynamodb_disable_kinesis_streaming_destination(data, table_def):
    """Disable an ACTIVE Kinesis streaming destination on a DynamoDB table.

    :param data: request payload containing 'StreamArn' and 'TableName'
    :param table_def: mutable table definition dict, updated in place
    :return: a success response with DestinationStatus DISABLED, or a
             ValidationException error response if no matching ACTIVE
             destination exists.
    """
    if table_def.get('KinesisDataStreamDestinations'):
        if table_def['KinesisDataStreamDestinationStatus'] == 'ACTIVE':
            for dest in table_def['KinesisDataStreamDestinations']:
                if dest['StreamArn'] == data['StreamArn'] and dest[
                        'DestinationStatus'] == 'ACTIVE':
                    dest['DestinationStatus'] = 'DISABLED'
                    # fix: a stray trailing comma previously turned this value into a tuple
                    dest['DestinationStatusDescription'] = 'Stream is disabled'
                    table_def['KinesisDataStreamDestinationStatus'] = 'DISABLED'
                    response = aws_responses.requests_response({
                        'DestinationStatus': 'DISABLED',
                        'StreamArn': data['StreamArn'],
                        'TableName': data['TableName']
                    })
                    return response
    return error_response(
        error_type='ValidationException',
        message='Table is not in a valid state to disable Kinesis Streaming Destination:'
        'DisableKinesisStreamingDestination must be ACTIVE to perform DISABLE operation.')
def return_response(self, method, path, data, headers, response):
    """Post-process Route53 responses coming back from the backend."""
    # pass server errors through untouched
    if response.status_code >= 500:
        return
    result = None
    delegation_match = re.match(PATH_DELEGATION_SETS, path)
    if delegation_match:
        result = handle_delegation_sets_request(delegation_match, method, data)
    if response.ok and result is None:
        add_vpc_info_to_response(path, response)
        return
    if path.endswith("/associatevpc") or path.endswith("/disassociatevpc"):
        result = handle_associate_vpc_request(method, path, data)
    if method == "GET" and "/hostedzonesbyvpc" in path:
        result = handle_hosted_zones_by_vpc_request(method, path, data)
    if not isinstance(result, dict):
        return result
    # attach the Route53 XML namespace to the root element before serializing
    result[list(result.keys())[0]]["@xmlns"] = XMLNS_ROUTE53
    return requests_response(xmltodict.unparse(result))
def dynamodb_enable_kinesis_streaming_destination(data, table_def):
    """Enable a Kinesis streaming destination on a table, replacing any stale entry."""
    status = table_def.get('KinesisDataStreamDestinationStatus')
    if status in ['DISABLED', 'ENABLE_FAILED', None]:
        destinations = table_def.get('KinesisDataStreamDestinations') or []
        # drop any pre-existing entry for the same stream, then re-add it as ACTIVE
        destinations = [d for d in destinations if d['StreamArn'] != data['StreamArn']]
        destinations.append({
            'DestinationStatus': 'ACTIVE',
            'DestinationStatusDescription': 'Stream is active',
            'StreamArn': data['StreamArn']
        })
        table_def['KinesisDataStreamDestinations'] = destinations
        table_def['KinesisDataStreamDestinationStatus'] = 'ACTIVE'
        return aws_responses.requests_response({
            'DestinationStatus': 'ACTIVE',
            'StreamArn': data['StreamArn'],
            'TableName': data['TableName']
        })
    return error_response(
        error_type='ValidationException',
        message='Table is not in a valid state to enable Kinesis Streaming '
        'Destination:EnableKinesisStreamingDestination must be DISABLED or ENABLE_FAILED '
        'to perform ENABLE operation.')
def return_response(self, method, path, data, headers, response):
    """Patch API Gateway backend responses and publish API lifecycle events."""
    # fix backend issue (missing support for API documentation)
    if re.match(r'/restapis/[^/]+/documentation/versions', path) and response.status_code == 404:
        return requests_response({'position': '1', 'items': []})
    # add missing implementations
    if response.status_code == 404:
        payload = data and json.loads(to_str(data))
        result = None
        if path == '/account':
            result = handle_accounts(method, path, payload, headers)
        if re.match(PATH_REGEX_PATH_MAPPINGS, path):
            # NOTE(review): 'hande_...' looks like a typo but must match its definition elsewhere
            result = hande_base_path_mappings(method, path, payload, headers)
        if result is not None:
            response.status_code = 200
            aws_responses.set_response_content(response, result)
    # publish event
    if method == 'POST' and path == '/restapis':
        content = json.loads(to_str(response.content))
        event_publisher.fire_event(
            event_publisher.EVENT_APIGW_CREATE_API,
            payload={'a': event_publisher.get_hash(content['id'])})
    api_regex = r'^/restapis/([a-zA-Z0-9\-]+)$'
    if method == 'DELETE' and re.match(api_regex, path):
        api_id = re.sub(api_regex, r'\1', path)
        event_publisher.fire_event(
            event_publisher.EVENT_APIGW_DELETE_API,
            payload={'a': event_publisher.get_hash(api_id)})
def no_such_key_error(resource, requestId=None, status_code=400):
    """Build an S3 'NoSuchKey' XML error response for the given resource."""
    error = {
        'Error': {
            'Code': 'NoSuchKey',
            'Message': 'The resource you requested does not exist',
            'Resource': resource,
            'RequestId': requestId,
        }
    }
    return requests_response(
        xmltodict.unparse(error),
        status_code=status_code,
        headers={'content-type': 'application/xml'})
def convert_flask_to_httpretty_response_call(*args, **kwargs):
    """Wrapper turning NotImplementedError from the original handler into a 501 response."""
    try:
        return convert_flask_to_httpretty_response_call_orig(*args, **kwargs)
    except NotImplementedError as e:
        action = request.headers.get("X-Amz-Target") or f"{request.method} {urlparse(request.url).path}"
        if action == "POST /":
            # try to extract action from exception string
            match = re.match(
                r"The ([a-zA-Z0-9_-]+) action has not been implemented", str(e))
            if match:
                action = snake_to_camel_case(match.group(1))
        service = extract_service_name_from_auth_header(request.headers)
        msg = f"API action '{action}' for service '{service}' not yet implemented"
        response = requests_error_response(request.headers, msg, code=501)
        if config.MOCK_UNIMPLEMENTED:
            is_json = is_json_request(request.headers)
            headers = {HEADER_CONTENT_TYPE: APPLICATION_JSON if is_json else APPLICATION_XML}
            # TODO: return proper mocked response
            response = requests_response("{}" if is_json else "<Response />", headers=headers)
            LOG.info(f"{msg}. Returning mocked response due to MOCK_UNIMPLEMENTED=1")
        else:
            LOG.info(msg)
        # TODO: publish analytics event
        return requests_to_flask_response(response)
def token_expired_error(resource, requestId=None, status_code=400):
    """Build an 'ExpiredToken' XML error response for the given resource."""
    error = {
        'Error': {
            'Code': 'ExpiredToken',
            'Message': 'The provided token has expired.',
            'Resource': resource,
            'RequestId': requestId,
        }
    }
    return requests_response(
        xmltodict.unparse(error),
        status_code=status_code,
        headers={'content-type': 'application/xml'})
def get_lifecycle(bucket_name):
    """Return the lifecycle configuration of the given bucket (empty if unset)."""
    # TODO: check if bucket exists, otherwise return 404-like error
    config = BUCKET_LIFECYCLE.get(bucket_name) or {'LifecycleConfiguration': {}}
    return requests_response(xmltodict.unparse(config))
def forward_request(self, method, path, data, headers):
    """Test-helper proxy: record the incoming request and reply with a mock OAuth token."""
    event = json.loads(to_str(data))
    events.append(event)
    paths_list.append(path)
    auth_value = headers.get("Api") or headers.get("Authorization")
    if auth_value not in headers_list:
        headers_list.append(auth_value)
    target = headers.get("target_header")
    if target:
        headers_list.append(target)
    if "client_id" in event:
        # capture OAuth client credentials for later assertions
        oauth_data.update({
            "client_id": event.get("client_id"),
            "client_secret": event.get("client_secret"),
            "header_value": headers.get("oauthheader"),
            "body_value": event.get("oauthbody"),
            "path": path,
        })
    return requests_response({
        "access_token": token,
        "token_type": "Bearer",
        "expires_in": 86400,
    })
def dynamodb_disable_kinesis_streaming_destination(data, table_def):
    """Disable an ACTIVE Kinesis streaming destination on a DynamoDB table.

    :param data: request payload containing 'StreamArn' and 'TableName'
    :param table_def: mutable table definition dict, updated in place
    :return: a success response with DestinationStatus DISABLED, or a
             ValidationException error response if no matching ACTIVE
             destination exists.
    """
    if table_def.get("KinesisDataStreamDestinations"):
        if table_def["KinesisDataStreamDestinationStatus"] == "ACTIVE":
            for dest in table_def["KinesisDataStreamDestinations"]:
                if dest["StreamArn"] == data["StreamArn"] and dest[
                        "DestinationStatus"] == "ACTIVE":
                    dest["DestinationStatus"] = "DISABLED"
                    # fix: the description must be a plain string, not a 1-tuple
                    dest["DestinationStatusDescription"] = "Stream is disabled"
                    table_def[
                        "KinesisDataStreamDestinationStatus"] = "DISABLED"
                    response = aws_responses.requests_response({
                        "DestinationStatus": "DISABLED",
                        "StreamArn": data["StreamArn"],
                        "TableName": data["TableName"],
                    })
                    return response
    return error_response(
        error_type="ValidationException",
        message=
        "Table is not in a valid state to disable Kinesis Streaming Destination:"
        "DisableKinesisStreamingDestination must be ACTIVE to perform DISABLE operation.",
    )
def forward_request(self, method, path, data, headers):
    """Intercept SQS requests before forwarding them to the backend (moto or ElasticMQ).

    Returns 200 for CORS preflights, an error/response object when the request
    is answered locally, a modified Request when the forwarded request must be
    rewritten, or True to forward the request unchanged.
    """
    if method == 'OPTIONS':
        # CORS preflight - succeed without forwarding
        return 200
    req_data = parse_request_data(method, path, data)
    if is_sqs_queue_url(path) and method == 'GET':
        # plain GET on a queue URL: translate into a GetQueueUrl API call
        if not headers.get('Authorization'):
            headers['Authorization'] = aws_stack.mock_aws_request_headers(service='sqs')['Authorization']
        method = 'POST'
        req_data = {'Action': 'GetQueueUrl', 'Version': API_VERSION, 'QueueName': path.split('/')[-1]}
    if req_data:
        action = req_data.get('Action')
        if action in ('SendMessage', 'SendMessageBatch') and SQS_BACKEND_IMPL == 'moto':
            # check message contents
            for key, value in req_data.items():
                if not re.match(MSG_CONTENT_REGEX, str(value)):
                    return make_requests_error(code=400, code_string='InvalidMessageContents',
                        message='Message contains invalid characters')
        elif action == 'SetQueueAttributes':
            # TODO remove this function if we stop using ElasticMQ entirely
            queue_url = _queue_url(path, req_data, headers)
            if SQS_BACKEND_IMPL == 'elasticmq':
                forward_attrs = _set_queue_attributes(queue_url, req_data)
                if len(req_data) != len(forward_attrs):
                    # make sure we only forward the supported attributes to the backend
                    return _get_attributes_forward_request(method, path, headers, req_data, forward_attrs)
        elif action == 'CreateQueue':
            # rewrite dead-letter queue ARNs in the attributes if needed
            changed_attrs = _fix_dlq_arn_in_attributes(req_data)
            if changed_attrs:
                return _get_attributes_forward_request(method, path, headers, req_data, changed_attrs)
        elif action == 'DeleteQueue':
            # clean up cached attributes and any SNS subscriptions for the queue
            queue_url = _queue_url(path, req_data, headers)
            QUEUE_ATTRIBUTES.pop(queue_url, None)
            sns_listener.unsubscribe_sqs_queue(queue_url)
        elif action == 'ListDeadLetterSourceQueues':
            # TODO remove this function if we stop using ElasticMQ entirely
            queue_url = _queue_url(path, req_data, headers)
            if SQS_BACKEND_IMPL == 'elasticmq':
                headers = {'content-type': 'application/xhtml+xml'}
                content_str = _list_dead_letter_source_queues(QUEUE_ATTRIBUTES, queue_url)
                return requests_response(content_str, headers=headers)
        if 'QueueName' in req_data:
            # re-encode the (possibly rewritten) parameters into the outgoing request
            encoded_data = urlencode(req_data, doseq=True) if method == 'POST' else ''
            modified_url = None
            if method == 'GET':
                base_path = path.partition('?')[0]
                modified_url = '%s?%s' % (base_path, urlencode(req_data, doseq=True))
            return Request(data=encoded_data, url=modified_url, headers=headers, method=method)
    return True
def return_response(self, method, path, data, headers, response, request_handler):
    """Patch SQS backend responses: fix queue URLs/ARNs, fire events, notify listeners.

    Returns a patched response when the content was modified, 200 for CORS
    preflights, or None to keep the backend response unchanged.
    """
    if method == 'OPTIONS' and path == '/':
        # Allow CORS preflight requests to succeed.
        return 200
    if method != 'POST':
        return
    region_name = aws_stack.get_region()
    req_data = urlparse.parse_qs(to_str(data))
    action = req_data.get('Action', [None])[0]
    content_str = content_str_original = to_str(response.content)
    if response.status_code >= 400:
        return response
    _fire_event(req_data, response)
    # patch the response and add missing attributes
    if action == 'GetQueueAttributes':
        content_str = _add_queue_attributes(path, req_data, content_str, headers)
    # patch the response and return the correct endpoint URLs / ARNs
    if action in ('CreateQueue', 'GetQueueUrl', 'ListQueues', 'GetQueueAttributes'):
        if config.USE_SSL and '<QueueUrl>http://' in content_str:
            # return https://... if we're supposed to use SSL
            content_str = re.sub(r'<QueueUrl>\s*http://', r'<QueueUrl>https://', content_str)
        # expose external hostname:port
        external_port = SQS_PORT_EXTERNAL or get_external_port(headers, request_handler)
        content_str = re.sub(
            r'<QueueUrl>\s*([a-z]+)://[^<]*:([0-9]+)/([^<]*)\s*</QueueUrl>',
            r'<QueueUrl>\1://%s:%s/\3</QueueUrl>' % (HOSTNAME_EXTERNAL, external_port),
            content_str)
        # fix queue ARN
        content_str = re.sub(
            r'<([a-zA-Z0-9]+)>\s*arn:aws:sqs:elasticmq:([^<]+)</([a-zA-Z0-9]+)>',
            r'<\1>arn:aws:sqs:%s:\2</\3>' % region_name,
            content_str)
        if action == 'CreateQueue':
            queue_url = re.match(r'.*<QueueUrl>(.*)</QueueUrl>', content_str, re.DOTALL).group(1)
            _set_queue_attributes(queue_url, req_data)
    # instruct listeners to fetch new SQS message
    if action in ('SendMessage', 'SendMessageBatch'):
        _process_sent_message(path, req_data, headers)
    if content_str_original != content_str:
        # if changes have been made, return patched response
        # fix: HTTP header values must be strings, not ints
        response.headers['content-length'] = str(len(content_str))
        return requests_response(content_str, headers=response.headers, status_code=response.status_code)
def get_replication(bucket_name):
    """Return the replication configuration of a bucket, or a 404 XML error if unset."""
    bucket_name = normalize_bucket_name(bucket_name)
    available, status, error_body = is_bucket_available(bucket_name)
    if not available:
        return requests_response(error_body, status_code=status)
    config = BUCKET_REPLICATIONS.get(bucket_name)
    if config:
        return requests_response(xmltodict.unparse(config), status_code=200)
    error = {
        'Error': {
            'Code': 'ReplicationConfigurationNotFoundError',
            'Message': 'The replication configuration was not found'
        }
    }
    return requests_response(xmltodict.unparse(error), status_code=404)
def get_object_lock(bucket_name):
    """Return the Object Lock configuration of a bucket, or a 404 XML error if unset."""
    bucket_name = normalize_bucket_name(bucket_name)
    available, status, error_body = is_bucket_available(bucket_name)
    if not available:
        return requests_response(error_body, status_code=status)
    config = OBJECT_LOCK_CONFIGS.get(bucket_name)
    if config:
        return requests_response(xmltodict.unparse(config), status_code=200)
    error = {
        'Error': {
            'Code': 'ObjectLockConfigurationNotFoundError',
            'Message': 'Object Lock configuration does not exist for this bucket'
        }
    }
    return requests_response(xmltodict.unparse(error), status_code=404)
def get_encryption(bucket_name):
    """Return the encryption configuration of a bucket, or a 404 XML error if unset."""
    bucket_name = normalize_bucket_name(bucket_name)
    available, status, error_body = is_bucket_available(bucket_name)
    if not available:
        return requests_response(error_body, status_code=status)
    config = BUCKET_ENCRYPTIONS.get(bucket_name)
    if config:
        return requests_response(xmltodict.unparse(config), status_code=200)
    error = {
        'Error': {
            'Code': 'ServerSideEncryptionConfigurationNotFoundError',
            'Message': 'The server side encryption configuration was not found'
        }
    }
    return requests_response(xmltodict.unparse(error), status_code=404)
def handle_special_request(method, path, data, headers):
    """Serve the DynamoDB web-shell redirect and CORS preflight requests."""
    if path.startswith('/shell') or method == 'GET':
        if path != '/shell':
            return True
        # redirect '/shell' to the trailing-slash shell URL
        redirect = {'Refresh': '0; url=%s/shell/' % config.TEST_DYNAMODB_URL}
        return aws_responses.requests_response('', headers=redirect)
    if method == 'OPTIONS':
        return 200
def set_lifecycle(bucket_name, lifecycle):
    """Persist the lifecycle configuration of a bucket; returns 200 on success."""
    bucket_name = normalize_bucket_name(bucket_name)
    available, status, error_body = is_bucket_available(bucket_name)
    if not available:
        return requests_response(error_body, status_code=status)
    parsed = lifecycle
    if isinstance(to_str(parsed), six.string_types):
        # raw XML payload - convert to a dict before storing
        parsed = xmltodict.parse(parsed)
    BUCKET_LIFECYCLE[bucket_name] = parsed
    return 200
def set_replication(bucket_name, replication):
    """Persist the replication configuration of a bucket; returns 200 on success."""
    bucket_name = normalize_bucket_name(bucket_name)
    available, status, error_body = is_bucket_available(bucket_name)
    if not available:
        return requests_response(error_body, status_code=status)
    parsed = replication
    if isinstance(to_str(parsed), six.string_types):
        # raw XML payload - convert to a dict before storing
        parsed = xmltodict.parse(parsed)
    BUCKET_REPLICATIONS[bucket_name] = parsed
    return 200
def set_encryption(bucket_name, encryption):
    """Persist the encryption configuration of a bucket; returns 200 on success."""
    bucket_name = normalize_bucket_name(bucket_name)
    available, status, error_body = is_bucket_available(bucket_name)
    if not available:
        return requests_response(error_body, status_code=status)
    parsed = encryption
    if isinstance(to_str(parsed), six.string_types):
        # raw XML payload - convert to a dict before storing
        parsed = xmltodict.parse(parsed)
    BUCKET_ENCRYPTIONS[bucket_name] = parsed
    return 200
def set_object_lock(bucket_name, lock_config):
    """Persist the Object Lock configuration of a bucket; returns 200 on success."""
    bucket_name = normalize_bucket_name(bucket_name)
    available, status, error_body = is_bucket_available(bucket_name)
    if not available:
        return requests_response(error_body, status_code=status)
    parsed = lock_config
    if isinstance(to_str(parsed), six.string_types):
        # raw XML payload - convert to a dict before storing
        parsed = xmltodict.parse(parsed)
    OBJECT_LOCK_CONFIGS[bucket_name] = parsed
    return 200
def get_lifecycle(bucket_name):
    """Return the lifecycle configuration of a bucket, or a 404 XML error if unset."""
    bucket_name = normalize_bucket_name(bucket_name)
    available, status, error_body = is_bucket_available(bucket_name)
    if not available:
        return requests_response(error_body, status_code=status)
    config = BUCKET_LIFECYCLE.get(bucket_name)
    if config:
        return requests_response(xmltodict.unparse(config), status_code=200)
    error = {
        'Error': {
            'Code': 'NoSuchLifecycleConfiguration',
            'Message': 'The lifecycle configuration does not exist'
        }
    }
    return requests_response(xmltodict.unparse(error), status_code=404)
def handle_special_request(method, path, data, headers):
    """Serve the DynamoDB web-shell redirect and CORS preflight requests."""
    if path.startswith("/shell") or method == "GET":
        if path != "/shell":
            return True
        # redirect '/shell' to the trailing-slash shell URL
        redirect = {"Refresh": "0; url=%s/shell/" % config.TEST_DYNAMODB_URL}
        return aws_responses.requests_response("", headers=redirect)
    if method == "OPTIONS":
        return 200
def forward_request(self, method, path, data, headers):
    """Intercept SQS API calls before they reach the backend."""
    if method == 'OPTIONS':
        # CORS preflight - succeed without forwarding
        return 200
    params = parse_request_data(method, path, data)
    if not params:
        return True
    action = params.get('Action')
    if action in ('SendMessage', 'SendMessageBatch') and BACKEND_IMPL == 'moto':
        # check message contents
        for value in params.values():
            if not re.match(MSG_CONTENT_REGEX, str(value)):
                return make_requests_error(
                    code=400, code_string='InvalidMessageContents',
                    message='Message contains invalid characters')
    elif action == 'SetQueueAttributes':
        queue_url = _queue_url(path, params, headers)
        supported = _set_queue_attributes(queue_url, params)
        if len(params) != len(supported):
            # make sure we only forward the supported attributes to the backend
            return _get_attributes_forward_request(
                method, path, headers, params, supported)
    elif action == 'DeleteQueue':
        # clean up cached attributes and any SNS subscriptions for the queue
        queue_url = _queue_url(path, params, headers)
        QUEUE_ATTRIBUTES.pop(queue_url, None)
        sns_listener.unsubscribe_sqs_queue(queue_url)
    elif action == 'ListDeadLetterSourceQueues':
        queue_url = _queue_url(path, params, headers)
        content = _list_dead_letter_source_queues(QUEUE_ATTRIBUTES, queue_url)
        return requests_response(content, headers={'content-type': 'application/xhtml+xml'})
    if 'QueueName' in params:
        # re-encode the (possibly modified) parameters into the outgoing request
        encoded = urlencode(params, doseq=True) if method == 'POST' else ''
        new_url = None
        if method == 'GET':
            new_url = '%s?%s' % (path.partition('?')[0], urlencode(params, doseq=True))
        return Request(data=encoded, url=new_url, headers=headers, method=method)
    return True
def forward_request(self, method, path, data, headers):
    """Test-helper proxy: record the request and answer with a static bearer token."""
    events.append(json.loads(to_str(data)))
    paths_list.append(path)
    auth_value = headers.get("Api") or headers.get("Authorization")
    if auth_value not in headers_list:
        headers_list.append(auth_value)
    return requests_response({
        "access_token": token,
        "token_type": "Bearer",
        "expires_in": 86400,
    })
def get_replication(bucket_name):
    """Return the replication configuration of a bucket, or a 404 XML error if unset."""
    bucket_name = normalize_bucket_name(bucket_name)
    config = BUCKET_REPLICATIONS.get(bucket_name)
    if config:
        return requests_response(xmltodict.unparse(config), status_code=200)
    # TODO: check if bucket actually exists
    error = {
        'Error': {
            'Code': 'ReplicationConfigurationNotFoundError',
            'Message': 'The replication configuration was not found'
        }
    }
    return requests_response(xmltodict.unparse(error), status_code=404)
def get_lifecycle(bucket_name):
    """Return the lifecycle configuration of a bucket, or a 404 XML error if unset."""
    bucket_name = normalize_bucket_name(bucket_name)
    config = BUCKET_LIFECYCLE.get(bucket_name)
    if config:
        return requests_response(xmltodict.unparse(config), status_code=200)
    # TODO: check if bucket actually exists
    error = {
        'Error': {
            'Code': 'NoSuchLifecycleConfiguration',
            'Message': 'The lifecycle configuration does not exist'
        }
    }
    return requests_response(xmltodict.unparse(error), status_code=404)
def return_response(self, method, path, data, headers, response):
    """Patch API Gateway documentation responses and publish API lifecycle events."""
    # fix backend issue (missing support for API documentation)
    if re.match(r'/restapis/[^/]+/documentation/versions', path) and response.status_code == 404:
        return requests_response({'position': '1', 'items': []})
    # publish event
    if method == 'POST' and path == '/restapis':
        content = json.loads(to_str(response.content))
        event_publisher.fire_event(
            event_publisher.EVENT_APIGW_CREATE_API,
            payload={'a': event_publisher.get_hash(content['id'])})
    api_regex = r'^/restapis/([a-zA-Z0-9\-]+)$'
    if method == 'DELETE' and re.match(api_regex, path):
        api_id = re.sub(api_regex, r'\1', path)
        event_publisher.fire_event(
            event_publisher.EVENT_APIGW_DELETE_API,
            payload={'a': event_publisher.get_hash(api_id)})
def check_content_md5(data, headers):
    """Validate the Content-MD5 request header against the payload.

    Returns an 'InvalidDigest' XML error response on mismatch, or None when valid.
    """
    computed = md5(strip_chunk_signatures(data))
    header_value = headers['Content-MD5']
    try:
        # header carries base64-encoded digest bytes; convert to a hex string
        claimed = to_str(codecs.encode(base64.b64decode(header_value), 'hex'))
    except Exception:
        claimed = '__invalid__'
    if computed == claimed:
        return
    error = {
        'Error': {
            'Code': 'InvalidDigest',
            'Message': 'The Content-MD5 you specified was invalid'
        }
    }
    return requests_response(xmltodict.unparse(error), status_code=400)