def DynamoDB_Table_get_cfn_attribute(self, attribute_name):
    try:
        return DynamoDB_Table_get_cfn_attribute_orig(self, attribute_name)
    except Exception:
        if attribute_name == 'Arn':
            return aws_stack.dynamodb_table_arn(table_name=self.name)
        raise
def prepare_transact_write_item_records(self, record, data):
    records = []
    for i, request in enumerate(data['TransactItems']):
        put_request = request.get('Put')
        if put_request:
            existing_item = self._thread_local('existing_items')[i]
            table_name = put_request['TableName']
            keys = dynamodb_extract_keys(item=put_request['Item'], table_name=table_name)
            if isinstance(keys, Response):
                return keys
            new_record = clone(record)
            new_record['eventName'] = 'INSERT' if not existing_item else 'MODIFY'
            new_record['dynamodb']['Keys'] = keys
            new_record['dynamodb']['NewImage'] = put_request['Item']
            if existing_item:
                new_record['dynamodb']['OldImage'] = existing_item
            new_record['eventSourceARN'] = aws_stack.dynamodb_table_arn(table_name)
            records.append(new_record)
        update_request = request.get('Update')
        if update_request:
            table_name = update_request['TableName']
            keys = update_request['Key']
            if isinstance(keys, Response):
                return keys
            updated_item = find_existing_item(update_request, table_name)
            if not updated_item:
                return
            new_record = clone(record)
            new_record['eventName'] = 'MODIFY'
            new_record['dynamodb']['Keys'] = keys
            new_record['dynamodb']['OldImage'] = self._thread_local('existing_items')[i]
            new_record['dynamodb']['NewImage'] = updated_item
            new_record['eventSourceARN'] = aws_stack.dynamodb_table_arn(table_name)
            records.append(new_record)
        delete_request = request.get('Delete')
        if delete_request:
            table_name = delete_request['TableName']
            keys = delete_request['Key']
            if isinstance(keys, Response):
                return keys
            new_record = clone(record)
            new_record['eventName'] = 'REMOVE'
            new_record['dynamodb']['Keys'] = keys
            new_record['dynamodb']['OldImage'] = self._thread_local('existing_items')[i]
            new_record['eventSourceARN'] = aws_stack.dynamodb_table_arn(table_name)
            records.append(new_record)
    return records
def DynamoDB2_Table_get_cfn_attribute(self, attribute_name):
    if attribute_name == 'Arn':
        return aws_stack.dynamodb_table_arn(table_name=self.name)
    elif attribute_name == 'StreamArn':
        if (self.stream_specification or {}).get('StreamEnabled'):
            return aws_stack.dynamodb_stream_arn(self.name, 'latest')
        return None
    raise UnformattedGetAttTemplateException()
def delete_streams(table_arn):
    table_arn = aws_stack.dynamodb_table_arn(table_arn)
    stream = DDB_STREAMS.pop(table_arn, None)
    if stream:
        table_name = table_arn.split('/')[-1]
        stream_name = get_kinesis_stream_name(table_name)
        try:
            aws_stack.connect_to_service('kinesis').delete_stream(StreamName=stream_name)
            # sleep a bit, as stream deletion can take some time ...
            time.sleep(1)
        except Exception:
            pass  # ignore "stream not found" errors
def prepare_batch_write_item_records(self, record, data):
    records = []
    i = 0
    for table_name in sorted(data['RequestItems'].keys()):
        for request in data['RequestItems'][table_name]:
            put_request = request.get('PutRequest')
            if put_request:
                existing_item = self._thread_local('existing_items')[i]
                keys = dynamodb_extract_keys(item=put_request['Item'], table_name=table_name)
                if isinstance(keys, Response):
                    return keys
                new_record = clone(record)
                new_record['eventName'] = 'INSERT' if not existing_item else 'MODIFY'
                new_record['dynamodb']['Keys'] = keys
                new_record['dynamodb']['NewImage'] = put_request['Item']
                if existing_item:
                    new_record['dynamodb']['OldImage'] = existing_item
                new_record['eventSourceARN'] = aws_stack.dynamodb_table_arn(table_name)
                records.append(new_record)
            delete_request = request.get('DeleteRequest')
            if delete_request:
                keys = delete_request['Key']
                if isinstance(keys, Response):
                    return keys
                new_record = clone(record)
                new_record['eventName'] = 'REMOVE'
                new_record['dynamodb']['Keys'] = keys
                new_record['dynamodb']['OldImage'] = self._thread_local('existing_items')[i]
                new_record['eventSourceARN'] = aws_stack.dynamodb_table_arn(table_name)
                records.append(new_record)
            i += 1
    return records
def add_dynamodb_stream(table_name, view_type='NEW_AND_OLD_IMAGES', enabled=True):
    if enabled:
        # create kinesis stream as a backend
        stream_name = get_kinesis_stream_name(table_name)
        aws_stack.create_kinesis_stream(stream_name)
        stream = {
            'StreamArn': aws_stack.dynamodb_stream_arn(table_name=table_name),
            'TableName': table_name,
            'StreamLabel': 'TODO',
            'StreamStatus': 'ENABLED',
            'KeySchema': [],
            'Shards': []
        }
        table_arn = aws_stack.dynamodb_table_arn(table_name)
        DDB_STREAMS[table_arn] = stream
def has_event_sources_or_streams_enabled(table_name, cache={}):
    if not table_name:
        return
    table_arn = aws_stack.dynamodb_table_arn(table_name)
    cached = cache.get(table_arn)
    if isinstance(cached, bool):
        return cached
    sources = lambda_api.get_event_sources(source_arn=table_arn)
    result = False
    if sources:
        result = True
    if not result and dynamodbstreams_api.get_stream_for_table(table_arn):
        result = True
    cache[table_arn] = result
    return result
def add_dynamodb_stream(table_name, latest_stream_label=None, view_type='NEW_AND_OLD_IMAGES', enabled=True):
    if enabled:
        # create kinesis stream as a backend
        stream_name = get_kinesis_stream_name(table_name)
        aws_stack.create_kinesis_stream(stream_name)
        latest_stream_label = latest_stream_label or 'latest'
        stream = {
            'StreamArn': aws_stack.dynamodb_stream_arn(
                table_name=table_name, latest_stream_label=latest_stream_label),
            'TableName': table_name,
            'StreamLabel': latest_stream_label,
            'StreamStatus': 'ENABLED',
            'KeySchema': [],
            'Shards': []
        }
        table_arn = aws_stack.dynamodb_table_arn(table_name)
        DDB_STREAMS[table_arn] = stream
        # record event
        event_publisher.fire_event(event_publisher.EVENT_DYNAMODB_CREATE_STREAM,
                                   payload={'n': event_publisher.get_hash(table_name)})
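# Hedged usage sketch (not part of the original module): shows how add_dynamodb_stream()
# above could be driven from a CreateTable request that carries a StreamSpecification,
# mirroring the CreateTable handling in the proxy listeners further below.
# The 'orders' table name in the commented call is a hypothetical example value.
def _example_enable_stream_from_create_table(create_table_request):
    spec = create_table_request.get('StreamSpecification') or {}
    if spec.get('StreamEnabled'):
        add_dynamodb_stream(
            table_name=create_table_request['TableName'],
            view_type=spec.get('StreamViewType', 'NEW_AND_OLD_IMAGES'),
            enabled=True)

# _example_enable_stream_from_create_table(
#     {'TableName': 'orders', 'StreamSpecification': {'StreamEnabled': True, 'StreamViewType': 'NEW_IMAGE'}})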
def has_event_sources_or_streams_enabled(table_name, cache={}):
    if not table_name:
        return
    table_arn = aws_stack.dynamodb_table_arn(table_name)
    cached = cache.get(table_arn)
    if isinstance(cached, bool):
        return cached
    sources = lambda_api.get_event_sources(source_arn=table_arn)
    result = False
    if sources:
        result = True
    if not result and dynamodbstreams_api.get_stream_for_table(table_arn):
        result = True
    cache[table_arn] = result
    # if a Kinesis streaming destination is enabled, get the table name from table_arn,
    # since batch_write and transact_write operations pass the table ARN instead of the table name
    table_name = table_arn.split('/', 1)[-1]
    if not result and TABLE_DEFINITIONS.get(table_name):
        if TABLE_DEFINITIONS[table_name].get('KinesisDataStreamDestinationStatus') == 'ACTIVE':
            result = True
    return result
def has_event_sources_or_streams_enabled(table_name, cache={}):
    if not table_name:
        return
    table_arn = aws_stack.dynamodb_table_arn(table_name)
    cached = cache.get(table_arn)
    if isinstance(cached, bool):
        return cached
    sources = lambda_api.get_event_sources(source_arn=table_arn)
    result = False
    if sources:
        result = True
    if not result and dynamodbstreams_api.get_stream_for_table(table_arn):
        result = True
    cache[table_arn] = result
    # if a Kinesis streaming destination is enabled, get the table name from table_arn,
    # since batch_write and transact_write operations pass the table ARN instead of the table name
    table_name = table_arn.split("/", 1)[-1]
    table_definitions = DynamoDBRegion.get().table_definitions
    if not result and table_definitions.get(table_name):
        if table_definitions[table_name].get("KinesisDataStreamDestinationStatus") == "ACTIVE":
            result = True
    return result
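# Hedged usage sketch (not part of the original module): illustrates how the mutable
# cache argument of has_event_sources_or_streams_enabled() above is shared across all
# records of a single request, as the return_response() listeners below do with their
# streams_enabled_cache dict. The helper name and record list are hypothetical.
def _example_any_forwarding_enabled(table_name, records):
    streams_enabled_cache = {}
    enabled = has_event_sources_or_streams_enabled(table_name, streams_enabled_cache)
    for record in records:
        enabled = enabled or has_event_sources_or_streams_enabled(
            record["eventSourceARN"], streams_enabled_cache)
    return enabled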
def get_physical_resource_id(self, attribute=None, **kwargs):
    table_name = self.props.get('TableName')
    if attribute in REF_ID_ATTRS:
        return table_name
    return aws_stack.dynamodb_table_arn(table_name)
def return_response(self, method, path, data, headers, response):
    # update table definitions
    if data and 'TableName' in data and 'KeySchema' in data:
        TABLE_DEFINITIONS[data['TableName']] = data

    action = headers.get('X-Amz-Target')
    if not action:
        return

    response_data = json.loads(to_str(response.content))
    record = {
        "eventID": "1",
        "eventVersion": "1.0",
        "dynamodb": {
            "StreamViewType": "NEW_AND_OLD_IMAGES",
            "SequenceNumber": "1",
            "SizeBytes": -1
        },
        "awsRegion": DEFAULT_REGION,
        "eventSource": "aws:dynamodb"
    }
    records = [record]

    if action == '%s.UpdateItem' % ACTION_PREFIX:
        req = {'TableName': data['TableName'], 'Key': data['Key']}
        new_item = aws_stack.dynamodb_get_item_raw(req)
        if 'Item' not in new_item:
            if 'message' in new_item:
                ddb_client = aws_stack.connect_to_service('dynamodb')
                table_names = ddb_client.list_tables()['TableNames']
                msg = ('Unable to get item from DynamoDB (existing tables: %s): %s' %
                       (table_names, new_item['message']))
                LOGGER.warning(msg)
            return
        record['eventName'] = 'MODIFY'
        record['dynamodb']['Keys'] = data['Key']
        record['dynamodb']['NewImage'] = new_item['Item']
    elif action == '%s.BatchWriteItem' % ACTION_PREFIX:
        records = []
        for table_name, requests in data['RequestItems'].items():
            for request in requests:
                put_request = request.get('PutRequest')
                if put_request:
                    keys = dynamodb_extract_keys(item=put_request['Item'], table_name=table_name)
                    if isinstance(keys, Response):
                        return keys
                    new_record = clone(record)
                    new_record['eventName'] = 'INSERT'
                    new_record['dynamodb']['Keys'] = keys
                    new_record['dynamodb']['NewImage'] = put_request['Item']
                    new_record['eventSourceARN'] = aws_stack.dynamodb_table_arn(table_name)
                    records.append(new_record)
    elif action == '%s.PutItem' % ACTION_PREFIX:
        record['eventName'] = 'INSERT'
        keys = dynamodb_extract_keys(item=data['Item'], table_name=data['TableName'])
        if isinstance(keys, Response):
            return keys
        record['dynamodb']['Keys'] = keys
        record['dynamodb']['NewImage'] = data['Item']
    elif action == '%s.GetItem' % ACTION_PREFIX:
        if response.status_code == 200:
            content = json.loads(to_str(response.content))
            # make sure we append 'ConsumedCapacity', which is properly
            # returned by dynalite, but not by AWS's DynamoDBLocal
            if 'ConsumedCapacity' not in content and data.get('ReturnConsumedCapacity') in ('TOTAL', 'INDEXES'):
                content['ConsumedCapacity'] = {
                    'CapacityUnits': 0.5,  # TODO hardcoded
                    'TableName': data['TableName']
                }
                response._content = json.dumps(content)
                response.headers['content-length'] = len(response.content)
                response.headers['x-amz-crc32'] = calculate_crc32(response)
    elif action == '%s.DeleteItem' % ACTION_PREFIX:
        record['eventName'] = 'REMOVE'
        record['dynamodb']['Keys'] = data['Key']
    elif action == '%s.CreateTable' % ACTION_PREFIX:
        if 'StreamSpecification' in data:
            create_dynamodb_stream(data)
        return
    elif action == '%s.UpdateTable' % ACTION_PREFIX:
        if 'StreamSpecification' in data:
            create_dynamodb_stream(data)
        return
    else:
        # nothing to do
        return

    if 'TableName' in data:
        record['eventSourceARN'] = aws_stack.dynamodb_table_arn(data['TableName'])
    forward_to_lambda(records)
    forward_to_ddb_stream(records)
def prepare_batch_write_item_records(self, record, data):
    records = []
    unprocessed_items = {'PutRequest': {}, 'DeleteRequest': {}}
    i = 0
    for table_name in sorted(data['RequestItems'].keys()):
        # Add stream view type to record if ddb stream is enabled
        stream_spec = dynamodb_get_table_stream_specification(table_name=table_name)
        if stream_spec:
            record['dynamodb']['StreamViewType'] = stream_spec['StreamViewType']
        for request in data['RequestItems'][table_name]:
            put_request = request.get('PutRequest')
            existing_items = self._thread_local('existing_items')
            if put_request:
                if existing_items and len(existing_items) > i:
                    existing_item = existing_items[i]
                    keys = dynamodb_extract_keys(item=put_request['Item'], table_name=table_name)
                    if isinstance(keys, Response):
                        return keys
                    new_record = clone(record)
                    new_record['eventID'] = short_uid()
                    new_record['dynamodb']['SizeBytes'] = len(json.dumps(put_request['Item']))
                    new_record['eventName'] = 'INSERT' if not existing_item else 'MODIFY'
                    new_record['dynamodb']['Keys'] = keys
                    new_record['dynamodb']['NewImage'] = put_request['Item']
                    if existing_item:
                        new_record['dynamodb']['OldImage'] = existing_item
                    new_record['eventSourceARN'] = aws_stack.dynamodb_table_arn(table_name)
                    records.append(new_record)
                unprocessed_put_items = self._thread_local('unprocessed_put_items')
                if unprocessed_put_items and len(unprocessed_put_items) > i:
                    unprocessed_item = unprocessed_put_items[i]
                    if unprocessed_item:
                        unprocessed_items['PutRequest'].update(json.loads(json.dumps(unprocessed_item)))
            delete_request = request.get('DeleteRequest')
            if delete_request:
                if existing_items and len(existing_items) > i:
                    keys = delete_request['Key']
                    if isinstance(keys, Response):
                        return keys
                    new_record = clone(record)
                    new_record['eventID'] = short_uid()
                    new_record['eventName'] = 'REMOVE'
                    new_record['dynamodb']['Keys'] = keys
                    new_record['dynamodb']['OldImage'] = existing_items[i]
                    new_record['dynamodb']['SizeBytes'] = len(json.dumps(existing_items[i]))
                    new_record['eventSourceARN'] = aws_stack.dynamodb_table_arn(table_name)
                    records.append(new_record)
                unprocessed_delete_items = self._thread_local('unprocessed_delete_items')
                if unprocessed_delete_items and len(unprocessed_delete_items) > i:
                    unprocessed_item = unprocessed_delete_items[i]
                    if unprocessed_item:
                        unprocessed_items['DeleteRequest'].update(json.loads(json.dumps(unprocessed_item)))
            i += 1
    return records, unprocessed_items
def update_dynamodb(method, path, data, headers, response=None, return_forward_info=False):
    if return_forward_info:
        return True

    # update table definitions
    if data and 'TableName' in data and 'KeySchema' in data:
        TABLE_DEFINITIONS[data['TableName']] = data

    action = headers.get('X-Amz-Target')
    if not action:
        return

    response_data = json.loads(response.text)
    record = {
        "eventID": "1",
        "eventVersion": "1.0",
        "dynamodb": {
            "StreamViewType": "NEW_AND_OLD_IMAGES",
            "SequenceNumber": "1",
            "SizeBytes": -1
        },
        "awsRegion": DEFAULT_REGION,
        "eventSource": "aws:dynamodb"
    }
    event = {'Records': [record]}

    if action == 'DynamoDB_20120810.UpdateItem':
        req = {'TableName': data['TableName']}
        req['Key'] = data['Key']
        new_item = aws_stack.dynamodb_get_item_raw(TEST_DYNAMODB_URL, req)
        if 'Item' not in new_item:
            if 'message' in new_item:
                print('WARNING: Unable to get item from DynamoDB: %s' % new_item['message'])
            return
        record['eventName'] = 'MODIFY'
        record['dynamodb']['Keys'] = data['Key']
        record['dynamodb']['NewImage'] = new_item['Item']
    elif action == 'DynamoDB_20120810.PutItem':
        record['eventName'] = 'INSERT'
        keys = dynamodb_extract_keys(item=data['Item'], table_name=data['TableName'])
        record['dynamodb']['Keys'] = keys
        record['dynamodb']['NewImage'] = data['Item']
    elif action == 'DynamoDB_20120810.DeleteItem':
        record['eventName'] = 'REMOVE'
        record['dynamodb']['Keys'] = data['Key']
    elif action == 'DynamoDB_20120810.CreateTable':
        if 'StreamSpecification' in data:
            stream = data['StreamSpecification']
            enabled = stream['StreamEnabled']
            if enabled:
                table_name = data['TableName']
                view_type = stream['StreamViewType']
                dynamodbstreams_api.add_dynamodb_stream(
                    table_name=table_name, view_type=view_type, enabled=enabled)
        return
    else:
        # nothing to do
        return

    record['eventSourceARN'] = aws_stack.dynamodb_table_arn(data['TableName'])
    sources = lambda_api.get_event_sources(source_arn=record['eventSourceARN'])
    if len(sources) > 0:
        pass
    for src in sources:
        func_to_call = lambda_api.lambda_arn_to_function[src['FunctionArn']]
        lambda_api.run_lambda(func_to_call, event=event, context={})
def return_response(self, method, path, data, headers, response):
    if path.startswith('/shell') or method == 'GET':
        return

    data = json.loads(to_str(data))

    # update table definitions
    if data and 'TableName' in data and 'KeySchema' in data:
        TABLE_DEFINITIONS[data['TableName']] = data

    if response._content:
        # fix the table and latest stream ARNs (DynamoDBLocal hardcodes "ddblocal" as the region)
        content_replaced = re.sub(
            r'("TableArn"|"LatestStreamArn"|"StreamArn")\s*:\s*"arn:aws:dynamodb:ddblocal:([^"]+)"',
            r'\1: "arn:aws:dynamodb:%s:\2"' % aws_stack.get_region(),
            to_str(response._content))
        if content_replaced != response._content:
            response._content = content_replaced
            fix_headers_for_updated_response(response)

    action = headers.get('X-Amz-Target', '')
    action = action.replace(ACTION_PREFIX, '')
    if not action:
        return

    # upgrade event version to 1.1
    record = {
        'eventID': '1',
        'eventVersion': '1.1',
        'dynamodb': {
            'ApproximateCreationDateTime': time.time(),
            'StreamViewType': 'NEW_AND_OLD_IMAGES',
            'SizeBytes': -1
        },
        'awsRegion': aws_stack.get_region(),
        'eventSource': 'aws:dynamodb'
    }
    records = [record]

    streams_enabled_cache = {}
    table_name = data.get('TableName')
    event_sources_or_streams_enabled = has_event_sources_or_streams_enabled(
        table_name, streams_enabled_cache)

    if action == 'UpdateItem':
        if response.status_code == 200 and event_sources_or_streams_enabled:
            existing_item = self._thread_local('existing_item')
            record['eventName'] = 'INSERT' if not existing_item else 'MODIFY'
            record['eventID'] = short_uid()
            updated_item = find_existing_item(data)
            if not updated_item:
                return
            record['dynamodb']['Keys'] = data['Key']
            if existing_item:
                record['dynamodb']['OldImage'] = existing_item
            record['dynamodb']['NewImage'] = updated_item
            record['dynamodb']['SizeBytes'] = len(json.dumps(updated_item))
            stream_spec = dynamodb_get_table_stream_specification(table_name=table_name)
            if stream_spec:
                record['dynamodb']['StreamViewType'] = stream_spec['StreamViewType']
    elif action == 'BatchWriteItem':
        records = self.prepare_batch_write_item_records(record, data)
        for record in records:
            event_sources_or_streams_enabled = (
                event_sources_or_streams_enabled or
                has_event_sources_or_streams_enabled(record['eventSourceARN'], streams_enabled_cache))
    elif action == 'TransactWriteItems':
        records = self.prepare_transact_write_item_records(record, data)
        for record in records:
            event_sources_or_streams_enabled = (
                event_sources_or_streams_enabled or
                has_event_sources_or_streams_enabled(record['eventSourceARN'], streams_enabled_cache))
    elif action == 'PutItem':
        if response.status_code == 200:
            keys = dynamodb_extract_keys(item=data['Item'], table_name=table_name)
            if isinstance(keys, Response):
                return keys
            # fix response
            if response._content == '{}':
                response._content = update_put_item_response_content(data, response._content)
                fix_headers_for_updated_response(response)
            if event_sources_or_streams_enabled:
                existing_item = self._thread_local('existing_item')
                # Get stream specifications details for the table
                stream_spec = dynamodb_get_table_stream_specification(table_name=table_name)
                record['eventName'] = 'INSERT' if not existing_item else 'MODIFY'
                # prepare record keys
                record['dynamodb']['Keys'] = keys
                record['dynamodb']['NewImage'] = data['Item']
                record['dynamodb']['SizeBytes'] = len(json.dumps(data['Item']))
                record['eventID'] = short_uid()
                if stream_spec:
                    record['dynamodb']['StreamViewType'] = stream_spec['StreamViewType']
                if existing_item:
                    record['dynamodb']['OldImage'] = existing_item
    elif action in ('GetItem', 'Query'):
        if response.status_code == 200:
            content = json.loads(to_str(response.content))
            # make sure we append 'ConsumedCapacity', which is properly
            # returned by dynalite, but not by AWS's DynamoDBLocal
            if 'ConsumedCapacity' not in content and data.get('ReturnConsumedCapacity') in ['TOTAL', 'INDEXES']:
                content['ConsumedCapacity'] = {
                    'TableName': table_name,
                    'CapacityUnits': 5,  # TODO hardcoded
                    'ReadCapacityUnits': 2,
                    'WriteCapacityUnits': 3
                }
                response._content = json.dumps(content)
                fix_headers_for_updated_response(response)
    elif action == 'DeleteItem':
        if response.status_code == 200 and event_sources_or_streams_enabled:
            old_item = self._thread_local('existing_item')
            record['eventName'] = 'REMOVE'
            record['dynamodb']['Keys'] = data['Key']
            record['dynamodb']['OldImage'] = old_item
    elif action == 'CreateTable':
        if 'StreamSpecification' in data:
            if response.status_code == 200:
                content = json.loads(to_str(response._content))
                create_dynamodb_stream(data, content['TableDescription'].get('LatestStreamLabel'))
        event_publisher.fire_event(
            event_publisher.EVENT_DYNAMODB_CREATE_TABLE,
            payload={'n': event_publisher.get_hash(table_name)})
        if data.get('Tags') and response.status_code == 200:
            table_arn = json.loads(response._content)['TableDescription']['TableArn']
            TABLE_TAGS[table_arn] = {tag['Key']: tag['Value'] for tag in data['Tags']}
        return
    elif action == 'DeleteTable':
        if response.status_code == 200:
            table_arn = json.loads(response._content).get('TableDescription', {}).get('TableArn')
            event_publisher.fire_event(
                event_publisher.EVENT_DYNAMODB_DELETE_TABLE,
                payload={'n': event_publisher.get_hash(table_name)})
            self.delete_all_event_source_mappings(table_arn)
            dynamodbstreams_api.delete_streams(table_arn)
            TABLE_TAGS.pop(table_arn, None)
        return
    elif action == 'UpdateTable':
        if 'StreamSpecification' in data:
            if response.status_code == 200:
                content = json.loads(to_str(response._content))
                create_dynamodb_stream(data, content['TableDescription'].get('LatestStreamLabel'))
        return
    elif action == 'TagResource':
        table_arn = data['ResourceArn']
        if table_arn not in TABLE_TAGS:
            TABLE_TAGS[table_arn] = {}
        TABLE_TAGS[table_arn].update({tag['Key']: tag['Value'] for tag in data.get('Tags', [])})
        return
    elif action == 'UntagResource':
        table_arn = data['ResourceArn']
        for tag_key in data.get('TagKeys', []):
            TABLE_TAGS.get(table_arn, {}).pop(tag_key, None)
        return
    else:
        # nothing to do
        return

    if event_sources_or_streams_enabled and records and 'eventName' in records[0]:
        if 'TableName' in data:
            records[0]['eventSourceARN'] = aws_stack.dynamodb_table_arn(table_name)
        forward_to_lambda(records)
        records = self.prepare_records_to_forward_to_ddb_stream(records)
        forward_to_ddb_stream(records)
def prepare_transact_write_item_records(self, record, data):
    records = []
    # Fix issue #2745: existing_items only contain the Put/Update/Delete records,
    # so we will increase the index based on these events
    i = 0
    for request in data['TransactItems']:
        put_request = request.get('Put')
        if put_request:
            existing_item = self._thread_local('existing_items')[i]
            table_name = put_request['TableName']
            keys = dynamodb_extract_keys(item=put_request['Item'], table_name=table_name)
            if isinstance(keys, Response):
                return keys
            # Add stream view type to record if ddb stream is enabled
            stream_spec = dynamodb_get_table_stream_specification(table_name=table_name)
            if stream_spec:
                record['dynamodb']['StreamViewType'] = stream_spec['StreamViewType']
            new_record = clone(record)
            new_record['eventID'] = short_uid()
            new_record['eventName'] = 'INSERT' if not existing_item else 'MODIFY'
            new_record['dynamodb']['Keys'] = keys
            new_record['dynamodb']['NewImage'] = put_request['Item']
            if existing_item:
                new_record['dynamodb']['OldImage'] = existing_item
            new_record['eventSourceARN'] = aws_stack.dynamodb_table_arn(table_name)
            new_record['dynamodb']['SizeBytes'] = len(json.dumps(put_request['Item']))
            records.append(new_record)
            i += 1
        update_request = request.get('Update')
        if update_request:
            table_name = update_request['TableName']
            keys = update_request['Key']
            if isinstance(keys, Response):
                return keys
            updated_item = find_existing_item(update_request, table_name)
            if not updated_item:
                return []
            stream_spec = dynamodb_get_table_stream_specification(table_name=table_name)
            if stream_spec:
                record['dynamodb']['StreamViewType'] = stream_spec['StreamViewType']
            new_record = clone(record)
            new_record['eventID'] = short_uid()
            new_record['eventName'] = 'MODIFY'
            new_record['dynamodb']['Keys'] = keys
            new_record['dynamodb']['OldImage'] = self._thread_local('existing_items')[i]
            new_record['dynamodb']['NewImage'] = updated_item
            new_record['eventSourceARN'] = aws_stack.dynamodb_table_arn(table_name)
            new_record['dynamodb']['SizeBytes'] = len(json.dumps(updated_item))
            records.append(new_record)
            i += 1
        delete_request = request.get('Delete')
        if delete_request:
            table_name = delete_request['TableName']
            keys = delete_request['Key']
            existing_item = self._thread_local('existing_items')[i]
            if isinstance(keys, Response):
                return keys
            stream_spec = dynamodb_get_table_stream_specification(table_name=table_name)
            if stream_spec:
                record['dynamodb']['StreamViewType'] = stream_spec['StreamViewType']
            new_record = clone(record)
            new_record['eventID'] = short_uid()
            new_record['eventName'] = 'REMOVE'
            new_record['dynamodb']['Keys'] = keys
            new_record['dynamodb']['OldImage'] = existing_item
            new_record['dynamodb']['SizeBytes'] = len(json.dumps(existing_item))
            new_record['eventSourceARN'] = aws_stack.dynamodb_table_arn(table_name)
            records.append(new_record)
            i += 1
    return records
def return_response(self, method, path, data, headers, response):
    data = json.loads(to_str(data))

    # update table definitions
    if data and 'TableName' in data and 'KeySchema' in data:
        TABLE_DEFINITIONS[data['TableName']] = data

    if response._content:
        # fix the table ARN (DynamoDBLocal hardcodes "ddblocal" as the region)
        content_replaced = re.sub(
            r'"TableArn"\s*:\s*"arn:aws:dynamodb:ddblocal:([^"]+)"',
            r'"TableArn": "arn:aws:dynamodb:%s:\1"' % aws_stack.get_local_region(),
            to_str(response._content))
        if content_replaced != response._content:
            response._content = content_replaced
            fix_headers_for_updated_response(response)

    action = headers.get('X-Amz-Target')
    if not action:
        return

    record = {
        'eventID': '1',
        'eventVersion': '1.0',
        'dynamodb': {
            'StreamViewType': 'NEW_AND_OLD_IMAGES',
            'SizeBytes': -1
        },
        'awsRegion': DEFAULT_REGION,
        'eventSource': 'aws:dynamodb'
    }
    records = [record]

    if action == '%s.UpdateItem' % ACTION_PREFIX:
        updated_item = find_existing_item(data)
        if not updated_item:
            return
        record['eventName'] = 'MODIFY'
        record['dynamodb']['Keys'] = data['Key']
        record['dynamodb']['OldImage'] = ProxyListenerDynamoDB.thread_local.existing_item
        record['dynamodb']['NewImage'] = updated_item
        record['dynamodb']['SizeBytes'] = len(json.dumps(updated_item))
    elif action == '%s.BatchWriteItem' % ACTION_PREFIX:
        records = []
        for table_name, requests in data['RequestItems'].items():
            for request in requests:
                put_request = request.get('PutRequest')
                if put_request:
                    keys = dynamodb_extract_keys(item=put_request['Item'], table_name=table_name)
                    if isinstance(keys, Response):
                        return keys
                    new_record = clone(record)
                    new_record['eventName'] = 'INSERT'
                    new_record['dynamodb']['Keys'] = keys
                    new_record['dynamodb']['NewImage'] = put_request['Item']
                    new_record['eventSourceARN'] = aws_stack.dynamodb_table_arn(table_name)
                    records.append(new_record)
    elif action == '%s.PutItem' % ACTION_PREFIX:
        existing_item = ProxyListenerDynamoDB.thread_local.existing_item
        ProxyListenerDynamoDB.thread_local.existing_item = None
        record['eventName'] = 'INSERT' if not existing_item else 'MODIFY'
        keys = dynamodb_extract_keys(item=data['Item'], table_name=data['TableName'])
        if isinstance(keys, Response):
            return keys
        record['dynamodb']['Keys'] = keys
        record['dynamodb']['NewImage'] = data['Item']
        record['dynamodb']['SizeBytes'] = len(json.dumps(data['Item']))
    elif action == '%s.GetItem' % ACTION_PREFIX:
        if response.status_code == 200:
            content = json.loads(to_str(response.content))
            # make sure we append 'ConsumedCapacity', which is properly
            # returned by dynalite, but not by AWS's DynamoDBLocal
            if 'ConsumedCapacity' not in content and data.get('ReturnConsumedCapacity') in ('TOTAL', 'INDEXES'):
                content['ConsumedCapacity'] = {
                    'CapacityUnits': 0.5,  # TODO hardcoded
                    'TableName': data['TableName']
                }
                response._content = json.dumps(content)
                fix_headers_for_updated_response(response)
    elif action == '%s.DeleteItem' % ACTION_PREFIX:
        record['eventName'] = 'REMOVE'
        record['dynamodb']['Keys'] = data['Key']
    elif action == '%s.CreateTable' % ACTION_PREFIX:
        if 'StreamSpecification' in data:
            create_dynamodb_stream(data)
        event_publisher.fire_event(
            event_publisher.EVENT_DYNAMODB_CREATE_TABLE,
            payload={'n': event_publisher.get_hash(data['TableName'])})
        return
    elif action == '%s.DeleteTable' % ACTION_PREFIX:
        event_publisher.fire_event(
            event_publisher.EVENT_DYNAMODB_DELETE_TABLE,
            payload={'n': event_publisher.get_hash(data['TableName'])})
        return
    elif action == '%s.UpdateTable' % ACTION_PREFIX:
        if 'StreamSpecification' in data:
            create_dynamodb_stream(data)
        return
    else:
        # nothing to do
        return

    if len(records) > 0 and 'eventName' in records[0]:
        if 'TableName' in data:
            records[0]['eventSourceARN'] = aws_stack.dynamodb_table_arn(data['TableName'])
        forward_to_lambda(records)
        forward_to_ddb_stream(records)
def DynamoDB2_Table_get_cfn_attribute(self, attribute_name):
    if attribute_name == 'Arn':
        return aws_stack.dynamodb_table_arn(table_name=self.name)
def return_response(self, method, path, data, headers, response):
    if path.startswith('/shell'):
        return

    data = json.loads(to_str(data))

    # update table definitions
    if data and 'TableName' in data and 'KeySchema' in data:
        TABLE_DEFINITIONS[data['TableName']] = data

    if response._content:
        # fix the table and latest stream ARNs (DynamoDBLocal hardcodes "ddblocal" as the region)
        content_replaced = re.sub(
            r'("TableArn"|"LatestStreamArn"|"StreamArn")\s*:\s*"arn:aws:dynamodb:' + 'ddblocal:([^"]+)"',
            r'\1: "arn:aws:dynamodb:%s:\2"' % aws_stack.get_region(),
            to_str(response._content))
        if content_replaced != response._content:
            response._content = content_replaced
            fix_headers_for_updated_response(response)

    action = headers.get('X-Amz-Target')
    if not action:
        return

    record = {
        'eventID': '1',
        'eventVersion': '1.0',
        'dynamodb': {
            'StreamViewType': 'NEW_AND_OLD_IMAGES',
            'SizeBytes': -1
        },
        'awsRegion': aws_stack.get_region(),
        'eventSource': 'aws:dynamodb'
    }
    records = [record]

    if action == '%s.UpdateItem' % ACTION_PREFIX:
        if response.status_code == 200:
            updated_item = find_existing_item(data)
            if not updated_item:
                return
            record['eventName'] = 'MODIFY'
            record['dynamodb']['Keys'] = data['Key']
            record['dynamodb']['OldImage'] = self._thread_local('existing_item')
            record['dynamodb']['NewImage'] = updated_item
            record['dynamodb']['SizeBytes'] = len(json.dumps(updated_item))
    elif action == '%s.BatchWriteItem' % ACTION_PREFIX:
        records = self.prepare_batch_write_item_records(record, data)
    elif action == '%s.TransactWriteItems' % ACTION_PREFIX:
        records = self.prepare_transact_write_item_records(record, data)
    elif action == '%s.PutItem' % ACTION_PREFIX:
        if response.status_code == 200:
            existing_item = self._thread_local('existing_item')
            record['eventName'] = 'INSERT' if not existing_item else 'MODIFY'
            keys = dynamodb_extract_keys(item=data['Item'], table_name=data['TableName'])
            if isinstance(keys, Response):
                return keys
            # fix response
            if response._content == '{}':
                response._content = json.dumps({'Attributes': data['Item']})
                fix_headers_for_updated_response(response)
            # prepare record keys
            record['dynamodb']['Keys'] = keys
            record['dynamodb']['NewImage'] = data['Item']
            record['dynamodb']['SizeBytes'] = len(json.dumps(data['Item']))
            if existing_item:
                record['dynamodb']['OldImage'] = existing_item
    elif action == '%s.GetItem' % ACTION_PREFIX:
        if response.status_code == 200:
            content = json.loads(to_str(response.content))
            # make sure we append 'ConsumedCapacity', which is properly
            # returned by dynalite, but not by AWS's DynamoDBLocal
            if 'ConsumedCapacity' not in content and data.get('ReturnConsumedCapacity') in ('TOTAL', 'INDEXES'):
                content['ConsumedCapacity'] = {
                    'CapacityUnits': 0.5,  # TODO hardcoded
                    'TableName': data['TableName']
                }
                response._content = json.dumps(content)
                fix_headers_for_updated_response(response)
    elif action == '%s.DeleteItem' % ACTION_PREFIX:
        if response.status_code == 200:
            old_item = self._thread_local('existing_item')
            record['eventName'] = 'REMOVE'
            record['dynamodb']['Keys'] = data['Key']
            record['dynamodb']['OldImage'] = old_item
    elif action == '%s.CreateTable' % ACTION_PREFIX:
        if 'StreamSpecification' in data:
            if response.status_code == 200:
                content = json.loads(to_str(response._content))
                create_dynamodb_stream(data, content['TableDescription'].get('LatestStreamLabel'))
        event_publisher.fire_event(
            event_publisher.EVENT_DYNAMODB_CREATE_TABLE,
            payload={'n': event_publisher.get_hash(data['TableName'])})
        return
    elif action == '%s.DeleteTable' % ACTION_PREFIX:
        event_publisher.fire_event(
            event_publisher.EVENT_DYNAMODB_DELETE_TABLE,
            payload={'n': event_publisher.get_hash(data['TableName'])})
        return
    elif action == '%s.UpdateTable' % ACTION_PREFIX:
        if 'StreamSpecification' in data:
            if response.status_code == 200:
                content = json.loads(to_str(response._content))
                create_dynamodb_stream(data, content['TableDescription'].get('LatestStreamLabel'))
        return
    else:
        # nothing to do
        return

    if len(records) > 0 and 'eventName' in records[0]:
        if 'TableName' in data:
            records[0]['eventSourceARN'] = aws_stack.dynamodb_table_arn(data['TableName'])
        forward_to_lambda(records)
        forward_to_ddb_stream(records)
def update_dynamodb(method, path, data, headers, response=None, return_forward_info=False):
    if return_forward_info:
        if random.random() < config.DYNAMODB_ERROR_PROBABILITY:
            return dynamodb_error_response(data)
        return True

    # update table definitions
    if data and 'TableName' in data and 'KeySchema' in data:
        TABLE_DEFINITIONS[data['TableName']] = data

    action = headers.get('X-Amz-Target')
    if not action:
        return

    response_data = json.loads(to_str(response.content))
    record = {
        "eventID": "1",
        "eventVersion": "1.0",
        "dynamodb": {
            "StreamViewType": "NEW_AND_OLD_IMAGES",
            "SequenceNumber": "1",
            "SizeBytes": -1
        },
        "awsRegion": DEFAULT_REGION,
        "eventSource": "aws:dynamodb"
    }
    records = [record]

    if action == 'DynamoDB_20120810.UpdateItem':
        req = {'TableName': data['TableName'], 'Key': data['Key']}
        new_item = aws_stack.dynamodb_get_item_raw(req)
        if 'Item' not in new_item:
            if 'message' in new_item:
                ddb_client = aws_stack.connect_to_service('dynamodb')
                table_names = ddb_client.list_tables()['TableNames']
                msg = 'Unable to get item from DynamoDB (existing tables: %s): %s' % (
                    table_names, new_item['message'])
                LOGGER.warning(msg)
            return
        record['eventName'] = 'MODIFY'
        record['dynamodb']['Keys'] = data['Key']
        record['dynamodb']['NewImage'] = new_item['Item']
    elif action == 'DynamoDB_20120810.BatchWriteItem':
        records = []
        for table_name, requests in data['RequestItems'].items():
            for request in requests:
                put_request = request.get('PutRequest')
                if put_request:
                    keys = dynamodb_extract_keys(item=put_request['Item'], table_name=table_name)
                    new_record = clone(record)
                    new_record['eventName'] = 'INSERT'
                    new_record['dynamodb']['Keys'] = keys
                    new_record['dynamodb']['NewImage'] = put_request['Item']
                    new_record['eventSourceARN'] = aws_stack.dynamodb_table_arn(table_name)
                    records.append(new_record)
    elif action == 'DynamoDB_20120810.PutItem':
        record['eventName'] = 'INSERT'
        keys = dynamodb_extract_keys(item=data['Item'], table_name=data['TableName'])
        record['dynamodb']['Keys'] = keys
        record['dynamodb']['NewImage'] = data['Item']
    elif action == 'DynamoDB_20120810.DeleteItem':
        record['eventName'] = 'REMOVE'
        record['dynamodb']['Keys'] = data['Key']
    elif action == 'DynamoDB_20120810.CreateTable':
        if 'StreamSpecification' in data:
            stream = data['StreamSpecification']
            enabled = stream.get('StreamEnabled')
            if enabled not in [False, 'False']:
                table_name = data['TableName']
                view_type = stream['StreamViewType']
                dynamodbstreams_api.add_dynamodb_stream(
                    table_name=table_name, view_type=view_type, enabled=enabled)
        return
    else:
        # nothing to do
        return

    if 'TableName' in data:
        record['eventSourceARN'] = aws_stack.dynamodb_table_arn(data['TableName'])
    for record in records:
        sources = lambda_api.get_event_sources(source_arn=record['eventSourceARN'])
        event = {'Records': [record]}
        for src in sources:
            func_to_call = lambda_api.lambda_arn_to_function[src['FunctionArn']]
            lambda_api.run_lambda(func_to_call, event=event, context={}, func_arn=src['FunctionArn'])
def return_response(self, method, path, data, headers, response):
    if path.startswith("/shell") or method == "GET":
        return

    data = json.loads(to_str(data))

    # update table definitions
    if data and "TableName" in data and "KeySchema" in data:
        table_definitions = DynamoDBRegion.get().table_definitions
        table_definitions[data["TableName"]] = data

    if response._content:
        # fix the table and latest stream ARNs (DynamoDBLocal hardcodes "ddblocal" as the region)
        content_replaced = re.sub(
            r'("TableArn"|"LatestStreamArn"|"StreamArn")\s*:\s*"arn:aws:dynamodb:ddblocal:([^"]+)"',
            r'\1: "arn:aws:dynamodb:%s:\2"' % aws_stack.get_region(),
            to_str(response._content),
        )
        if content_replaced != response._content:
            response._content = content_replaced
            fix_headers_for_updated_response(response)

    action = headers.get("X-Amz-Target", "")
    action = action.replace(ACTION_PREFIX, "")
    if not action:
        return

    # upgrade event version to 1.1
    record = {
        "eventID": "1",
        "eventVersion": "1.1",
        "dynamodb": {
            "ApproximateCreationDateTime": time.time(),
            # 'StreamViewType': 'NEW_AND_OLD_IMAGES',
            "SizeBytes": -1,
        },
        "awsRegion": aws_stack.get_region(),
        "eventSource": "aws:dynamodb",
    }
    records = [record]

    streams_enabled_cache = {}
    table_name = data.get("TableName")
    event_sources_or_streams_enabled = has_event_sources_or_streams_enabled(
        table_name, streams_enabled_cache)

    if action == "UpdateItem":
        if response.status_code == 200 and event_sources_or_streams_enabled:
            existing_item = self._thread_local("existing_item")
            record["eventName"] = "INSERT" if not existing_item else "MODIFY"
            record["eventID"] = short_uid()
            updated_item = find_existing_item(data)
            if not updated_item:
                return
            record["dynamodb"]["Keys"] = data["Key"]
            if existing_item:
                record["dynamodb"]["OldImage"] = existing_item
            record["dynamodb"]["NewImage"] = updated_item
            record["dynamodb"]["SizeBytes"] = len(json.dumps(updated_item))
            stream_spec = dynamodb_get_table_stream_specification(table_name=table_name)
            if stream_spec:
                record["dynamodb"]["StreamViewType"] = stream_spec["StreamViewType"]
    elif action == "BatchWriteItem":
        records, unprocessed_items = self.prepare_batch_write_item_records(record, data)
        for record in records:
            event_sources_or_streams_enabled = (
                event_sources_or_streams_enabled or
                has_event_sources_or_streams_enabled(record["eventSourceARN"], streams_enabled_cache))
        if response.status_code == 200 and any(unprocessed_items):
            content = json.loads(to_str(response.content))
            table_name = list(data["RequestItems"].keys())[0]
            if table_name not in content["UnprocessedItems"]:
                content["UnprocessedItems"][table_name] = []
            for key in ["PutRequest", "DeleteRequest"]:
                if any(unprocessed_items[key]):
                    content["UnprocessedItems"][table_name].append({key: unprocessed_items[key]})
            unprocessed = content["UnprocessedItems"]
            for key in list(unprocessed.keys()):
                if not unprocessed.get(key):
                    del unprocessed[key]
            response._content = json.dumps(content)
            fix_headers_for_updated_response(response)
    elif action == "TransactWriteItems":
        records = self.prepare_transact_write_item_records(record, data)
        for record in records:
            event_sources_or_streams_enabled = (
                event_sources_or_streams_enabled or
                has_event_sources_or_streams_enabled(record["eventSourceARN"], streams_enabled_cache))
    elif action == "PutItem":
        if response.status_code == 200:
            keys = dynamodb_extract_keys(item=data["Item"], table_name=table_name)
            if isinstance(keys, Response):
                return keys
            # fix response
            if response._content == "{}":
                response._content = update_put_item_response_content(data, response._content)
                fix_headers_for_updated_response(response)
            if event_sources_or_streams_enabled:
                existing_item = self._thread_local("existing_item")
                # Get stream specifications details for the table
                stream_spec = dynamodb_get_table_stream_specification(table_name=table_name)
                record["eventName"] = "INSERT" if not existing_item else "MODIFY"
                # prepare record keys
                record["dynamodb"]["Keys"] = keys
                record["dynamodb"]["NewImage"] = data["Item"]
                record["dynamodb"]["SizeBytes"] = len(json.dumps(data["Item"]))
                record["eventID"] = short_uid()
                if stream_spec:
                    record["dynamodb"]["StreamViewType"] = stream_spec["StreamViewType"]
                if existing_item:
                    record["dynamodb"]["OldImage"] = existing_item
    elif action in ("GetItem", "Query"):
        if response.status_code == 200:
            content = json.loads(to_str(response.content))
            # make sure we append 'ConsumedCapacity', which is properly
            # returned by dynalite, but not by AWS's DynamoDBLocal
            if "ConsumedCapacity" not in content and data.get("ReturnConsumedCapacity") in ["TOTAL", "INDEXES"]:
                content["ConsumedCapacity"] = {
                    "TableName": table_name,
                    "CapacityUnits": 5,  # TODO hardcoded
                    "ReadCapacityUnits": 2,
                    "WriteCapacityUnits": 3,
                }
                response._content = json.dumps(content)
                fix_headers_for_updated_response(response)
    elif action == "DeleteItem":
        if response.status_code == 200 and event_sources_or_streams_enabled:
            old_item = self._thread_local("existing_item")
            record["eventID"] = short_uid()
            record["eventName"] = "REMOVE"
            record["dynamodb"]["Keys"] = data["Key"]
            record["dynamodb"]["OldImage"] = old_item
            record["dynamodb"]["SizeBytes"] = len(json.dumps(old_item))
            # Get stream specifications details for the table
            stream_spec = dynamodb_get_table_stream_specification(table_name=table_name)
            if stream_spec:
                record["dynamodb"]["StreamViewType"] = stream_spec["StreamViewType"]
    elif action == "CreateTable":
        if "StreamSpecification" in data:
            if response.status_code == 200:
                content = json.loads(to_str(response._content))
                create_dynamodb_stream(data, content["TableDescription"].get("LatestStreamLabel"))
        event_publisher.fire_event(
            event_publisher.EVENT_DYNAMODB_CREATE_TABLE,
            payload={"n": event_publisher.get_hash(table_name)},
        )
        if data.get("Tags") and response.status_code == 200:
            table_arn = json.loads(response._content)["TableDescription"]["TableArn"]
            DynamoDBRegion.TABLE_TAGS[table_arn] = {tag["Key"]: tag["Value"] for tag in data["Tags"]}
        return
    elif action == "DeleteTable":
        if response.status_code == 200:
            table_arn = json.loads(response._content).get("TableDescription", {}).get("TableArn")
            event_publisher.fire_event(
                event_publisher.EVENT_DYNAMODB_DELETE_TABLE,
                payload={"n": event_publisher.get_hash(table_name)},
            )
            self.delete_all_event_source_mappings(table_arn)
            dynamodbstreams_api.delete_streams(table_arn)
            DynamoDBRegion.TABLE_TAGS.pop(table_arn, None)
        return
    elif action == "UpdateTable":
        content_str = to_str(response._content or "")
        if response.status_code == 200 and "StreamSpecification" in data:
            content = json.loads(content_str)
            create_dynamodb_stream(data, content["TableDescription"].get("LatestStreamLabel"))
        if (response.status_code >= 400 and data.get("ReplicaUpdates")
                and "Nothing to update" in content_str):
            table_name = data.get("TableName")
            # update local table props (replicas)
            table_properties = DynamoDBRegion.get().table_properties
            table_properties[table_name] = table_props = table_properties.get(table_name) or {}
            table_props["Replicas"] = replicas = table_props.get("Replicas") or []
            for repl_update in data["ReplicaUpdates"]:
                for key, details in repl_update.items():
                    region = details.get("RegionName")
                    if key == "Create":
                        details["ReplicaStatus"] = details.get("ReplicaStatus") or "ACTIVE"
                        replicas.append(details)
                    if key == "Update":
                        replica = [r for r in replicas if r.get("RegionName") == region]
                        if replica:
                            replica[0].update(details)
                    if key == "Delete":
                        table_props["Replicas"] = [r for r in replicas if r.get("RegionName") != region]
            # update response content
            schema = get_table_schema(table_name)
            result = {"TableDescription": schema["Table"]}
            update_response_content(response, json_safe(result), 200)
        return
    elif action == "DescribeTable":
        table_name = data.get("TableName")
        table_props = DynamoDBRegion.get().table_properties.get(table_name)
        if table_props:
            content = json.loads(to_str(response.content))
            content.get("Table", {}).update(table_props)
            update_response_content(response, content)
    elif action == "TagResource":
        table_arn = data["ResourceArn"]
        table_tags = DynamoDBRegion.TABLE_TAGS
        if table_arn not in table_tags:
            table_tags[table_arn] = {}
        table_tags[table_arn].update({tag["Key"]: tag["Value"] for tag in data.get("Tags", [])})
        return
    elif action == "UntagResource":
        table_arn = data["ResourceArn"]
        for tag_key in data.get("TagKeys", []):
            DynamoDBRegion.TABLE_TAGS.get(table_arn, {}).pop(tag_key, None)
        return
    else:
        # nothing to do
        return

    if event_sources_or_streams_enabled and records and "eventName" in records[0]:
        if "TableName" in data:
            records[0]["eventSourceARN"] = aws_stack.dynamodb_table_arn(table_name)
        # forward to kinesis stream
        forward_to_kinesis_stream(records)
        # forward to lambda and ddb_streams
        forward_to_lambda(records)
        records = self.prepare_records_to_forward_to_ddb_stream(records)
        forward_to_ddb_stream(records)
def return_response(self, method, path, data, headers, response):
    data = json.loads(to_str(data))

    # update table definitions
    if data and 'TableName' in data and 'KeySchema' in data:
        TABLE_DEFINITIONS[data['TableName']] = data

    if response._content:
        # fix the table ARN (DynamoDBLocal hardcodes "ddblocal" as the region)
        content_replaced = re.sub(
            r'"TableArn"\s*:\s*"arn:aws:dynamodb:ddblocal:([^"]+)"',
            r'"TableArn": "arn:aws:dynamodb:%s:\1"' % aws_stack.get_local_region(),
            to_str(response._content))
        if content_replaced != response._content:
            response._content = content_replaced
            fix_headers_for_updated_response(response)

    action = headers.get('X-Amz-Target')
    if not action:
        return

    record = {
        'eventID': '1',
        'eventVersion': '1.0',
        'dynamodb': {
            'StreamViewType': 'NEW_AND_OLD_IMAGES',
            'SizeBytes': -1
        },
        'awsRegion': DEFAULT_REGION,
        'eventSource': 'aws:dynamodb'
    }
    records = [record]

    if action == '%s.UpdateItem' % ACTION_PREFIX:
        updated_item = find_existing_item(data)
        if not updated_item:
            return
        record['eventName'] = 'MODIFY'
        record['dynamodb']['Keys'] = data['Key']
        record['dynamodb']['OldImage'] = ProxyListenerDynamoDB.thread_local.existing_item
        record['dynamodb']['NewImage'] = updated_item
        record['dynamodb']['SizeBytes'] = len(json.dumps(updated_item))
    elif action == '%s.BatchWriteItem' % ACTION_PREFIX:
        records = []
        for table_name, requests in data['RequestItems'].items():
            for request in requests:
                put_request = request.get('PutRequest')
                if put_request:
                    keys = dynamodb_extract_keys(item=put_request['Item'], table_name=table_name)
                    if isinstance(keys, Response):
                        return keys
                    new_record = clone(record)
                    new_record['eventName'] = 'INSERT'
                    new_record['dynamodb']['Keys'] = keys
                    new_record['dynamodb']['NewImage'] = put_request['Item']
                    new_record['eventSourceARN'] = aws_stack.dynamodb_table_arn(table_name)
                    records.append(new_record)
    elif action == '%s.PutItem' % ACTION_PREFIX:
        existing_item = ProxyListenerDynamoDB.thread_local.existing_item
        ProxyListenerDynamoDB.thread_local.existing_item = None
        record['eventName'] = 'INSERT' if not existing_item else 'MODIFY'
        keys = dynamodb_extract_keys(item=data['Item'], table_name=data['TableName'])
        if isinstance(keys, Response):
            return keys
        record['dynamodb']['Keys'] = keys
        record['dynamodb']['NewImage'] = data['Item']
        record['dynamodb']['SizeBytes'] = len(json.dumps(data['Item']))
    elif action == '%s.GetItem' % ACTION_PREFIX:
        if response.status_code == 200:
            content = json.loads(to_str(response.content))
            # make sure we append 'ConsumedCapacity', which is properly
            # returned by dynalite, but not by AWS's DynamoDBLocal
            if 'ConsumedCapacity' not in content and data.get('ReturnConsumedCapacity') in ('TOTAL', 'INDEXES'):
                content['ConsumedCapacity'] = {
                    'CapacityUnits': 0.5,  # TODO hardcoded
                    'TableName': data['TableName']
                }
                response._content = json.dumps(content)
                fix_headers_for_updated_response(response)
    elif action == '%s.DeleteItem' % ACTION_PREFIX:
        old_item = ProxyListenerDynamoDB.thread_local.existing_item
        record['eventName'] = 'REMOVE'
        record['dynamodb']['Keys'] = data['Key']
        record['dynamodb']['OldImage'] = old_item
    elif action == '%s.CreateTable' % ACTION_PREFIX:
        if 'StreamSpecification' in data:
            create_dynamodb_stream(data)
        event_publisher.fire_event(event_publisher.EVENT_DYNAMODB_CREATE_TABLE,
            payload={'n': event_publisher.get_hash(data['TableName'])})
        return
    elif action == '%s.DeleteTable' % ACTION_PREFIX:
        event_publisher.fire_event(event_publisher.EVENT_DYNAMODB_DELETE_TABLE,
            payload={'n': event_publisher.get_hash(data['TableName'])})
        return
    elif action == '%s.UpdateTable' % ACTION_PREFIX:
        if 'StreamSpecification' in data:
            create_dynamodb_stream(data)
        return
    else:
        # nothing to do
        return

    if len(records) > 0 and 'eventName' in records[0]:
        if 'TableName' in data:
            records[0]['eventSourceARN'] = aws_stack.dynamodb_table_arn(data['TableName'])
        forward_to_lambda(records)
        forward_to_ddb_stream(records)
def prepare_transact_write_item_records(self, record, data):
    records = []
    # Fix issue #2745: existing_items only contain the Put/Update/Delete records,
    # so we will increase the index based on these events
    i = 0
    for request in data["TransactItems"]:
        put_request = request.get("Put")
        if put_request:
            existing_item = self._thread_local("existing_items")[i]
            table_name = put_request["TableName"]
            keys = dynamodb_extract_keys(item=put_request["Item"], table_name=table_name)
            if isinstance(keys, Response):
                return keys
            # Add stream view type to record if ddb stream is enabled
            stream_spec = dynamodb_get_table_stream_specification(table_name=table_name)
            if stream_spec:
                record["dynamodb"]["StreamViewType"] = stream_spec["StreamViewType"]
            new_record = clone(record)
            new_record["eventID"] = short_uid()
            new_record["eventName"] = "INSERT" if not existing_item else "MODIFY"
            new_record["dynamodb"]["Keys"] = keys
            new_record["dynamodb"]["NewImage"] = put_request["Item"]
            if existing_item:
                new_record["dynamodb"]["OldImage"] = existing_item
            new_record["eventSourceARN"] = aws_stack.dynamodb_table_arn(table_name)
            new_record["dynamodb"]["SizeBytes"] = len(json.dumps(put_request["Item"]))
            records.append(new_record)
            i += 1
        update_request = request.get("Update")
        if update_request:
            table_name = update_request["TableName"]
            keys = update_request["Key"]
            if isinstance(keys, Response):
                return keys
            updated_item = find_existing_item(update_request, table_name)
            if not updated_item:
                return []
            stream_spec = dynamodb_get_table_stream_specification(table_name=table_name)
            if stream_spec:
                record["dynamodb"]["StreamViewType"] = stream_spec["StreamViewType"]
            new_record = clone(record)
            new_record["eventID"] = short_uid()
            new_record["eventName"] = "MODIFY"
            new_record["dynamodb"]["Keys"] = keys
            new_record["dynamodb"]["OldImage"] = self._thread_local("existing_items")[i]
            new_record["dynamodb"]["NewImage"] = updated_item
            new_record["eventSourceARN"] = aws_stack.dynamodb_table_arn(table_name)
            new_record["dynamodb"]["SizeBytes"] = len(json.dumps(updated_item))
            records.append(new_record)
            i += 1
        delete_request = request.get("Delete")
        if delete_request:
            table_name = delete_request["TableName"]
            keys = delete_request["Key"]
            existing_item = self._thread_local("existing_items")[i]
            if isinstance(keys, Response):
                return keys
            stream_spec = dynamodb_get_table_stream_specification(table_name=table_name)
            if stream_spec:
                record["dynamodb"]["StreamViewType"] = stream_spec["StreamViewType"]
            new_record = clone(record)
            new_record["eventID"] = short_uid()
            new_record["eventName"] = "REMOVE"
            new_record["dynamodb"]["Keys"] = keys
            new_record["dynamodb"]["OldImage"] = existing_item
            new_record["dynamodb"]["SizeBytes"] = len(json.dumps(existing_item))
            new_record["eventSourceARN"] = aws_stack.dynamodb_table_arn(table_name)
            records.append(new_record)
            i += 1
    return records
def prepare_batch_write_item_records(self, record, data):
    records = []
    unprocessed_items = {"PutRequest": {}, "DeleteRequest": {}}
    i = 0
    for table_name in sorted(data["RequestItems"].keys()):
        # Add stream view type to record if ddb stream is enabled
        stream_spec = dynamodb_get_table_stream_specification(table_name=table_name)
        if stream_spec:
            record["dynamodb"]["StreamViewType"] = stream_spec["StreamViewType"]
        for request in data["RequestItems"][table_name]:
            put_request = request.get("PutRequest")
            existing_items = self._thread_local("existing_items")
            if put_request:
                if existing_items and len(existing_items) > i:
                    existing_item = existing_items[i]
                    keys = dynamodb_extract_keys(item=put_request["Item"], table_name=table_name)
                    if isinstance(keys, Response):
                        return keys
                    new_record = clone(record)
                    new_record["eventID"] = short_uid()
                    new_record["dynamodb"]["SizeBytes"] = len(json.dumps(put_request["Item"]))
                    new_record["eventName"] = "INSERT" if not existing_item else "MODIFY"
                    new_record["dynamodb"]["Keys"] = keys
                    new_record["dynamodb"]["NewImage"] = put_request["Item"]
                    if existing_item:
                        new_record["dynamodb"]["OldImage"] = existing_item
                    new_record["eventSourceARN"] = aws_stack.dynamodb_table_arn(table_name)
                    records.append(new_record)
                unprocessed_put_items = self._thread_local("unprocessed_put_items")
                if unprocessed_put_items and len(unprocessed_put_items) > i:
                    unprocessed_item = unprocessed_put_items[i]
                    if unprocessed_item:
                        unprocessed_items["PutRequest"].update(json.loads(json.dumps(unprocessed_item)))
            delete_request = request.get("DeleteRequest")
            if delete_request:
                if existing_items and len(existing_items) > i:
                    keys = delete_request["Key"]
                    if isinstance(keys, Response):
                        return keys
                    new_record = clone(record)
                    new_record["eventID"] = short_uid()
                    new_record["eventName"] = "REMOVE"
                    new_record["dynamodb"]["Keys"] = keys
                    new_record["dynamodb"]["OldImage"] = existing_items[i]
                    new_record["dynamodb"]["SizeBytes"] = len(json.dumps(existing_items[i]))
                    new_record["eventSourceARN"] = aws_stack.dynamodb_table_arn(table_name)
                    records.append(new_record)
                unprocessed_delete_items = self._thread_local("unprocessed_delete_items")
                if unprocessed_delete_items and len(unprocessed_delete_items) > i:
                    unprocessed_item = unprocessed_delete_items[i]
                    if unprocessed_item:
                        unprocessed_items["DeleteRequest"].update(json.loads(json.dumps(unprocessed_item)))
            i += 1
    return records, unprocessed_items