def test_lsi_and_gsi_same_name(dynamodb):
    """A CreateTable where a GSI and an LSI share the same index name must
    be rejected with a ValidationException mentioning a duplicate."""
    key_schema = [{'AttributeName': 'p', 'KeyType': 'HASH'},
                  {'AttributeName': 'c', 'KeyType': 'RANGE'}]
    attribute_definitions = [
        {'AttributeName': 'p', 'AttributeType': 'S'},
        {'AttributeName': 'c', 'AttributeType': 'S'},
        {'AttributeName': 'x1', 'AttributeType': 'S'},
    ]
    index_key = [{'AttributeName': 'p', 'KeyType': 'HASH'},
                 {'AttributeName': 'x1', 'KeyType': 'RANGE'}]
    with pytest.raises(ClientError, match='ValidationException.*Duplicate'):
        table = create_test_table(dynamodb,
            KeySchema=key_schema,
            AttributeDefinitions=attribute_definitions,
            GlobalSecondaryIndexes=[{
                'IndexName': 'samename',
                'KeySchema': index_key,
                'Projection': {'ProjectionType': 'KEYS_ONLY'}
            }],
            LocalSecondaryIndexes=[{
                'IndexName': 'samename',
                'KeySchema': index_key,
                'Projection': {'ProjectionType': 'KEYS_ONLY'}
            }])
        # Only reached if creation unexpectedly succeeded (pytest.raises
        # will then fail the test); clean up the stray table.
        table.delete()
def test_table_lsi_gsi(dynamodb):
    """Fixture: a table with both a GSI ('hello_g1') and an LSI ('hello_l1'),
    both indexing (p, x1). The table is deleted on teardown."""
    index_key = [{'AttributeName': 'p', 'KeyType': 'HASH'},
                 {'AttributeName': 'x1', 'KeyType': 'RANGE'}]
    table = create_test_table(dynamodb,
        KeySchema=[{'AttributeName': 'p', 'KeyType': 'HASH'},
                   {'AttributeName': 'c', 'KeyType': 'RANGE'}],
        AttributeDefinitions=[
            {'AttributeName': 'p', 'AttributeType': 'S'},
            {'AttributeName': 'c', 'AttributeType': 'S'},
            {'AttributeName': 'x1', 'AttributeType': 'S'},
        ],
        GlobalSecondaryIndexes=[{
            'IndexName': 'hello_g1',
            'KeySchema': index_key,
            'Projection': {'ProjectionType': 'KEYS_ONLY'}
        }],
        LocalSecondaryIndexes=[{
            'IndexName': 'hello_l1',
            'KeySchema': index_key,
            'Projection': {'ProjectionType': 'KEYS_ONLY'}
        }])
    yield table
    table.delete()
def test_table_special_column_name(dynamodb):
    """Fixture: a table whose hash and range key names are the module-level
    'special' column names (special_column_name1/2). Deleted on teardown."""
    table = create_test_table(dynamodb,
        KeySchema=[
            {'AttributeName': special_column_name1, 'KeyType': 'HASH'},
            {'AttributeName': special_column_name2, 'KeyType': 'RANGE'},
        ],
        AttributeDefinitions=[
            {'AttributeName': special_column_name1, 'AttributeType': 'S'},
            {'AttributeName': special_column_name2, 'AttributeType': 'S'},
        ])
    yield table
    table.delete()
def create_gsi(dynamodb, index_name):
    """Create (and immediately delete) a table with a single GSI named
    *index_name*, verifying that DescribeTable actually reports the GSI."""
    table = create_test_table(dynamodb,
        KeySchema=[{'AttributeName': 'p', 'KeyType': 'HASH'}],
        AttributeDefinitions=[{'AttributeName': 'p', 'AttributeType': 'S'}],
        GlobalSecondaryIndexes=[{
            'IndexName': index_name,
            'KeySchema': [{'AttributeName': 'p', 'KeyType': 'HASH'}],
            'Projection': {'ProjectionType': 'ALL'},
        }])
    # Verify that the GSI wasn't just ignored, as Scylla originally did ;-)
    desc = table.meta.client.describe_table(TableName=table.name)
    assert 'GlobalSecondaryIndexes' in desc['Table']
    table.delete()
def test_table_lsi_4(dynamodb):
    """Fixture: a table with four LSIs ('hello_x1'..'hello_x4'), one per
    extra attribute x1..x4. Deleted on teardown."""
    lsi_columns = ['x1', 'x2', 'x3', 'x4']
    attribute_definitions = [
        {'AttributeName': 'p', 'AttributeType': 'S'},
        {'AttributeName': 'c', 'AttributeType': 'S'},
    ]
    attribute_definitions += [
        {'AttributeName': col, 'AttributeType': 'S'} for col in lsi_columns
    ]
    local_indexes = [{
        'IndexName': 'hello_' + col,
        'KeySchema': [{'AttributeName': 'p', 'KeyType': 'HASH'},
                      {'AttributeName': col, 'KeyType': 'RANGE'}],
        'Projection': {'ProjectionType': 'ALL'},
    } for col in lsi_columns]
    table = create_test_table(dynamodb,
        KeySchema=[{'AttributeName': 'p', 'KeyType': 'HASH'},
                   {'AttributeName': 'c', 'KeyType': 'RANGE'}],
        AttributeDefinitions=attribute_definitions,
        LocalSecondaryIndexes=local_indexes)
    yield table
    table.delete()
def test_table_b(dynamodb):
    """Fixture: a table whose single (hash) key 'p' has the bytes ('B')
    attribute type. Deleted on teardown."""
    table = create_test_table(dynamodb,
        KeySchema=[{'AttributeName': 'p', 'KeyType': 'HASH'}],
        AttributeDefinitions=[{'AttributeName': 'p', 'AttributeType': 'B'}])
    yield table
    table.delete()
def test_table_streams_off(dynamodb):
    """A StreamSpecification with StreamEnabled=False behaves exactly as if
    no StreamSpecification was given - in particular, no StreamViewType is
    required."""
    table = create_test_table(dynamodb,
        StreamSpecification={'StreamEnabled': False},
        KeySchema=[{'AttributeName': 'p', 'KeyType': 'HASH'}],
        AttributeDefinitions=[{'AttributeName': 'p', 'AttributeType': 'S'}])
    table.delete()
def test_gsi_backfill(dynamodb):
    """Test adding a GSI to an already-filled table via UpdateTable, and
    that pre-existing data is backfilled into the new index."""
    # Create and fill a table without any GSI. Items in items1 have a
    # string-typed 'x' and will later be indexed. Items in items2 lack 'x'
    # entirely, and in items3 'x' is not a string - so items2 and items3
    # will be missing from the index we create below.
    table = create_test_table(dynamodb,
        KeySchema=[{'AttributeName': 'p', 'KeyType': 'HASH'}],
        AttributeDefinitions=[{'AttributeName': 'p', 'AttributeType': 'S'}])
    items1 = [{'p': random_string(), 'x': random_string(), 'y': random_string()}
              for _ in range(10)]
    items2 = [{'p': random_string(), 'y': random_string()} for _ in range(10)]
    items3 = [{'p': random_string(), 'x': i} for i in range(10)]
    items = items1 + items2 + items3
    with table.batch_writer() as batch:
        for item in items:
            batch.put_item(item)
    assert multiset(items) == multiset(full_scan(table))
    # Now use UpdateTable to create the GSI.
    client = dynamodb.meta.client
    client.update_table(TableName=table.name,
        AttributeDefinitions=[{'AttributeName': 'x', 'AttributeType': 'S'}],
        GlobalSecondaryIndexUpdates=[{'Create': {
            'IndexName': 'hello',
            'KeySchema': [{'AttributeName': 'x', 'KeyType': 'HASH'}],
            'Projection': {'ProjectionType': 'ALL'},
        }}])
    # update_table is asynchronous - wait until the GSI is backfilled.
    wait_for_gsi(table, 'hello')
    # As explained above, only items1 got copied into the GSI. We don't
    # need retrying reads (assert_index_scan/assert_index_query) here:
    # having waited for backfill, all pre-existing data is in the index.
    assert multiset(items1) == multiset(full_scan(table, IndexName='hello'))
    # Query on the new GSI also works, searching on attribute x:
    expected = multiset([items1[3]])
    assert expected == multiset(full_query(table, IndexName='hello',
        KeyConditions={'x': {'AttributeValueList': [items1[3]['x']],
                             'ComparisonOperator': 'EQ'}}))
    # We cannot add another index with a name that already exists:
    with pytest.raises(ClientError, match='ValidationException.*already exists'):
        client.update_table(TableName=table.name,
            AttributeDefinitions=[{'AttributeName': 'y', 'AttributeType': 'S'}],
            GlobalSecondaryIndexUpdates=[{'Create': {
                'IndexName': 'hello',
                'KeySchema': [{'AttributeName': 'y', 'KeyType': 'HASH'}],
                'Projection': {'ProjectionType': 'ALL'},
            }}])
    table.delete()
def test_table_streams_on(dynamodb):
    """Create and delete a table with an OLD_IMAGE stream enabled.

    NOTE(review): a second function with this same name appears later in
    the file and shadows this one, so pytest never collects this version -
    one of the two should be renamed.
    """
    table = create_test_table(dynamodb,
        StreamSpecification={'StreamEnabled': True,
                             'StreamViewType': 'OLD_IMAGE'},
        KeySchema=[{'AttributeName': 'p', 'KeyType': 'HASH'}],
        AttributeDefinitions=[{'AttributeName': 'p', 'AttributeType': 'S'}])
    table.delete()
def test_table_tags(dynamodb):
    """Fixture: a table created with PREDEFINED_TAGS already attached.

    Tagging tables at creation time was only added to DynamoDB in April
    2019 and to botocore in 1.12.136, so the test is skipped on older
    library versions:
    https://aws.amazon.com/about-aws/whats-new/2019/04/now-you-can-tag-amazon-dynamodb-tables-when-you-create-them/
    """
    import botocore
    if Version(botocore.__version__) < Version('1.12.136'):
        pytest.skip("Botocore version 1.12.136 or above required to run this test")
    table = create_test_table(dynamodb,
        KeySchema=[
            {'AttributeName': 'p', 'KeyType': 'HASH'},
            {'AttributeName': 'c', 'KeyType': 'RANGE'},
        ],
        AttributeDefinitions=[
            {'AttributeName': 'p', 'AttributeType': 'S'},
            {'AttributeName': 'c', 'AttributeType': 'N'},
        ],
        Tags=PREDEFINED_TAGS)
    yield table
    table.delete()
def test_lsi_wrong_bad_range(dynamodb):
    """Two invalid LSI key schemas must be rejected: one reusing the hash
    key 'p' as the range key too, and one missing a range key entirely."""
    key_schema = [
        {'AttributeName': 'p', 'KeyType': 'HASH'},
        {'AttributeName': 'c', 'KeyType': 'RANGE'},
    ]
    attribute_definitions = [
        {'AttributeName': 'p', 'AttributeType': 'S'},
        {'AttributeName': 'c', 'AttributeType': 'S'},
    ]
    # Case 1: LSI range key is the same attribute as its hash key.
    with pytest.raises(ClientError, match='ValidationException.*same'):
        table = create_test_table(dynamodb,
            KeySchema=key_schema,
            AttributeDefinitions=attribute_definitions,
            LocalSecondaryIndexes=[{
                'IndexName': 'hello',
                'KeySchema': [
                    {'AttributeName': 'p', 'KeyType': 'HASH'},
                    {'AttributeName': 'p', 'KeyType': 'RANGE'},
                ],
                'Projection': {'ProjectionType': 'ALL'},
            }])
        # Only reached if creation unexpectedly succeeded; clean up.
        table.delete()
    # Case 2: LSI with only a hash key and no range key.
    with pytest.raises(ClientError, match='ValidationException.*'):
        table = create_test_table(dynamodb,
            KeySchema=key_schema,
            AttributeDefinitions=attribute_definitions,
            LocalSecondaryIndexes=[{
                'IndexName': 'hello',
                'KeySchema': [
                    {'AttributeName': 'p', 'KeyType': 'HASH'},
                ],
                'Projection': {'ProjectionType': 'ALL'},
            }])
        # Only reached if creation unexpectedly succeeded; clean up.
        table.delete()
def test_table_sse_off(dynamodb):
    """An SSESpecification with Enabled=False behaves as if the
    SSESpecification was missing entirely - no other SSE attributes are
    required."""
    table = create_test_table(dynamodb,
        SSESpecification={'Enabled': False},
        KeySchema=[{'AttributeName': 'p', 'KeyType': 'HASH'}],
        AttributeDefinitions=[{'AttributeName': 'p', 'AttributeType': 'S'}])
    table.delete()
def test_table(dynamodb):
    """Fixture: a basic table with string hash key 'p' and string range
    key 'c'."""
    table = create_test_table(dynamodb,
        KeySchema=[
            {'AttributeName': 'p', 'KeyType': 'HASH'},
            {'AttributeName': 'c', 'KeyType': 'RANGE'},
        ],
        AttributeDefinitions=[
            {'AttributeName': 'p', 'AttributeType': 'S'},
            {'AttributeName': 'c', 'AttributeType': 'S'},
        ])
    yield table
    # Teardown: ask DynamoDB to delete the table without waiting for the
    # deletion to complete - the next test_table fixture picks a different
    # table name anyway.
    table.delete()
def test_table_streams_on(dynamodb):
    """Create and delete a table with a stream enabled, once for each of
    the four supported StreamViewType values."""
    # 'view_type' rather than 'type', to avoid shadowing the builtin.
    for view_type in ['OLD_IMAGE', 'NEW_IMAGE', 'KEYS_ONLY', 'NEW_AND_OLD_IMAGES']:
        table = create_test_table(dynamodb,
            StreamSpecification={'StreamEnabled': True,
                                 'StreamViewType': view_type},
            KeySchema=[{'AttributeName': 'p', 'KeyType': 'HASH'}],
            AttributeDefinitions=[{'AttributeName': 'p', 'AttributeType': 'S'}])
        table.delete()
def test_table_gsi_2(dynamodb):
    """Fixture: a hash-only table with a GSI 'hello' keyed on attribute
    'x' and projecting ALL attributes."""
    table = create_test_table(dynamodb,
        KeySchema=[{'AttributeName': 'p', 'KeyType': 'HASH'}],
        AttributeDefinitions=[
            {'AttributeName': 'p', 'AttributeType': 'S'},
            {'AttributeName': 'x', 'AttributeType': 'S'},
        ],
        GlobalSecondaryIndexes=[{
            'IndexName': 'hello',
            'KeySchema': [{'AttributeName': 'x', 'KeyType': 'HASH'}],
            'Projection': {'ProjectionType': 'ALL'},
        }])
    yield table
    table.delete()
def test_table_s_isolation_always(dynamodb):
    """Fixture: a table created with the system:write_isolation tag set to
    'always'. Deleted on teardown."""
    table = create_test_table(dynamodb,
        KeySchema=[{'AttributeName': 'p', 'KeyType': 'HASH'}],
        AttributeDefinitions=[{'AttributeName': 'p', 'AttributeType': 'S'}],
        Tags=[{'Key': 'system:write_isolation', 'Value': 'always'}])
    yield table
    table.delete()
def test_table_sn(dynamodb):
    """Fixture: a table with a string hash key 'p' and a *numeric* range
    key 'c'. Deleted on teardown."""
    table = create_test_table(dynamodb,
        KeySchema=[{'AttributeName': 'p', 'KeyType': 'HASH'},
                   {'AttributeName': 'c', 'KeyType': 'RANGE'}],
        AttributeDefinitions=[{'AttributeName': 'p', 'AttributeType': 'S'},
                              {'AttributeName': 'c', 'AttributeType': 'N'}])
    yield table
    table.delete()
def filled_test_table(dynamodb):
    """Fixture: a (p, c) string-keyed table pre-filled with items, yielded
    together with the list of items written.

    Contents: 164 items with distinct partitions, 164 more items sharing
    the single 'long' partition, plus one extra ('hello', 'world') item.
    """
    table = create_test_table(dynamodb,
        KeySchema=[{'AttributeName': 'p', 'KeyType': 'HASH'},
                   {'AttributeName': 'c', 'KeyType': 'RANGE'}],
        AttributeDefinitions=[
            {'AttributeName': 'p', 'AttributeType': 'S'},
            {'AttributeName': 'c', 'AttributeType': 'S'},
        ])
    count = 164
    items = [{
        'p': str(i),
        'c': str(i),
        'attribute': "x" * 7,
        'another': "y" * 16,
    } for i in range(count)]
    items += [{
        'p': 'long',
        'c': str(i),
        'attribute': "x" * (1 + i % 7),
        'another': "y" * (1 + i % 16),
    } for i in range(count)]
    items.append({'p': 'hello', 'c': 'world',
                  'str': 'and now for something completely different'})
    with table.batch_writer() as batch:
        for item in items:
            batch.put_item(item)
    yield table, items
    table.delete()
def test_lsi_identical(dynamodb):
    """An LSI whose key schema is identical to the base table's is allowed;
    scanning it returns the same items as scanning the base table. Scanning
    a non-existent index name fails with ValidationException."""
    base_key = [{'AttributeName': 'p', 'KeyType': 'HASH'},
                {'AttributeName': 'c', 'KeyType': 'RANGE'}]
    table = create_test_table(dynamodb,
        KeySchema=base_key,
        AttributeDefinitions=[{'AttributeName': 'p', 'AttributeType': 'S'},
                              {'AttributeName': 'c', 'AttributeType': 'S'}],
        LocalSecondaryIndexes=[{
            'IndexName': 'hello',
            'KeySchema': base_key,
            'Projection': {'ProjectionType': 'ALL'},
        }])
    items = [{'p': random_string(), 'c': random_string()} for _ in range(10)]
    with table.batch_writer() as batch:
        for item in items:
            batch.put_item(item)
    # Scanning the entire table directly or via the index yields the same
    # results (possibly in a different order).
    assert multiset(items) == multiset(full_scan(table))
    assert_index_scan(table, 'hello', items)
    # A non-existent index cannot be scanned.
    with pytest.raises(ClientError, match='ValidationException'):
        full_scan(table, IndexName='wrong')
    table.delete()
def test_table_ss_old_image_and_lsi(dynamodb, dynamodbstreams):
    """Fixture: a table with both an LSI (on p, k) and an OLD_IMAGE stream.
    Waits for the stream to become active, then yields (table, stream_arn)."""
    table = create_test_table(dynamodb,
        KeySchema=[
            {'AttributeName': 'p', 'KeyType': 'HASH'},
            {'AttributeName': 'c', 'KeyType': 'RANGE'},
        ],
        AttributeDefinitions=[
            {'AttributeName': 'p', 'AttributeType': 'S'},
            {'AttributeName': 'c', 'AttributeType': 'S'},
            {'AttributeName': 'k', 'AttributeType': 'S'},
        ],
        LocalSecondaryIndexes=[{
            'IndexName': 'index',
            'KeySchema': [
                {'AttributeName': 'p', 'KeyType': 'HASH'},
                {'AttributeName': 'k', 'KeyType': 'RANGE'},
            ],
            'Projection': {'ProjectionType': 'ALL'},
        }],
        StreamSpecification={'StreamEnabled': True,
                             'StreamViewType': 'OLD_IMAGE'})
    arn = wait_for_active_stream(dynamodbstreams, table, timeout=60)
    yield table, arn
    table.delete()
def test_table_s_forbid_rmw(dynamodb, scylla_only):
    """Fixture (Scylla-only): a hash-only table tagged after creation with
    system:write_isolation = 'forbid_rmw'. Deleted on teardown."""
    table = create_test_table(dynamodb,
        KeySchema=[{'AttributeName': 'p', 'KeyType': 'HASH'}],
        AttributeDefinitions=[{'AttributeName': 'p', 'AttributeType': 'S'}])
    client = table.meta.client
    arn = client.describe_table(TableName=table.name)['Table']['TableArn']
    client.tag_resource(ResourceArn=arn,
        Tags=[{'Key': 'system:write_isolation', 'Value': 'forbid_rmw'}])
    yield table
    table.delete()
def test_table_gsi_random_name(dynamodb):
    """Fixture: a (p, c) table with a GSI whose name is randomly generated;
    yields [table, index_name]. Deleted on teardown."""
    index_name = random_string()
    table = create_test_table(dynamodb,
        KeySchema=[
            {'AttributeName': 'p', 'KeyType': 'HASH'},
            {'AttributeName': 'c', 'KeyType': 'RANGE'},
        ],
        AttributeDefinitions=[
            {'AttributeName': 'p', 'AttributeType': 'S'},
            {'AttributeName': 'c', 'AttributeType': 'S'},
        ],
        GlobalSecondaryIndexes=[{
            # The GSI reverses the base table's key order: c is the hash
            # key and p the range key.
            'IndexName': index_name,
            'KeySchema': [
                {'AttributeName': 'c', 'KeyType': 'HASH'},
                {'AttributeName': 'p', 'KeyType': 'RANGE'},
            ],
            'Projection': {'ProjectionType': 'ALL'},
        }])
    yield [table, index_name]
    table.delete()
def test_gsi_projection_keys_only(dynamodb):
    """A GSI with a KEYS_ONLY projection returns only the key attributes
    (here 'p' and 'x') when scanned - other attributes are not projected."""
    table = create_test_table(dynamodb,
        KeySchema=[{'AttributeName': 'p', 'KeyType': 'HASH'}],
        AttributeDefinitions=[
            {'AttributeName': 'p', 'AttributeType': 'S'},
            {'AttributeName': 'x', 'AttributeType': 'S'},
        ],
        GlobalSecondaryIndexes=[{
            'IndexName': 'hello',
            'KeySchema': [{'AttributeName': 'x', 'KeyType': 'HASH'}],
            'Projection': {'ProjectionType': 'KEYS_ONLY'},
        }])
    items = [{'p': random_string(), 'x': random_string(), 'y': random_string()}
             for _ in range(10)]
    with table.batch_writer() as batch:
        for item in items:
            batch.put_item(item)
    # Only the base-table key and the index key are projected; 'y' is not.
    wanted = ['p', 'x']
    expected_items = [{k: item[k] for k in wanted if k in item}
                      for item in items]
    assert_index_scan(table, 'hello', expected_items)
    table.delete()
def create_stream_test_table(dynamodb, StreamViewType=None):
    """Fixture helper: yield a (p, c) string-keyed table with a stream of
    the given StreamViewType, or with streams disabled when StreamViewType
    is None (the default).

    On teardown the table is deleted, retrying every 5 seconds while the
    service reports ResourceInUseException (real DynamoDB refuses to delete
    a table whose stream is still being created).
    """
    # PEP 8: compare to None with 'is' / 'is not', never '!=' .
    if StreamViewType is None:
        spec = {'StreamEnabled': False}
    else:
        spec = {'StreamEnabled': True, 'StreamViewType': StreamViewType}
    table = create_test_table(dynamodb,
        StreamSpecification=spec,
        KeySchema=[{'AttributeName': 'p', 'KeyType': 'HASH'},
                   {'AttributeName': 'c', 'KeyType': 'RANGE'}],
        AttributeDefinitions=[
            {'AttributeName': 'p', 'AttributeType': 'S'},
            {'AttributeName': 'c', 'AttributeType': 'S'},
        ])
    yield table
    while True:
        try:
            table.delete()
            return
        except ClientError as ce:
            # If the table has a stream currently being created we cannot
            # delete the table immediately. Again, only with real DynamoDB.
            if ce.response['Error']['Code'] == 'ResourceInUseException':
                print('Could not delete table yet. Sleeping 5s.')
                time.sleep(5)
                continue
            raise