def update_capacity(self, dynamo_host, dynamo_region, table_root, logger, config):
    """
    Manage the DynamoDB tables:
    - Create the tables if they don't exist
    - Update the read/write capacity if they do exist and there is a conflict
      between required and current capacity

    :param dynamo_host: optional host, for local testing
    :param dynamo_region: aws region for the table
    :param table_root: prefix for table_name (e.g. flyby)
    :param logger: logger used for progress/error reporting
    :param config: capacity configuration consumed by return_capacity/capacity_check
    :return: None
    """
    models = [BackendModel, ServiceModel, TargetGroupModel, ResolverModel]
    for model in models:
        if dynamo_host:
            model.Meta.host = dynamo_host
            conn = Connection(host=dynamo_host)
        else:
            conn = Connection(region=dynamo_region)
        # Remember the un-prefixed name: it is the key used for config lookups.
        default_table_name = model.Meta.table_name
        # Prefix once; the startswith() guard prevents double-prefixing on repeated calls.
        if not model.Meta.table_name.startswith(table_root):
            model.Meta.table_name = "{0}-{1}".format(table_root, model.Meta.table_name)
        model.Meta.region = dynamo_region
        if not model.exists():
            logger.info("Creating {} table".format(model.Meta.table_name))
            # Look up the desired capacity once instead of calling return_capacity twice.
            capacity = self.return_capacity(default_table_name, config)
            model.create_table(read_capacity_units=capacity['read_capacity_units'],
                               write_capacity_units=capacity['write_capacity_units'],
                               wait=True
                               )
        else:
            table_name = model.Meta.table_name
            table_capacity = self.capacity_check(default_table_name, table_name, conn, config)
            if not table_capacity['result']:
                # DynamoDB limits the number of capacity decreases per table per day;
                # capacity_check reports how many decreases have already happened.
                if table_capacity['decreases'] < 4:
                    conn.update_table(
                        table_name=model.Meta.table_name,
                        read_capacity_units=table_capacity['read'],
                        write_capacity_units=table_capacity['write']
                    )
                    logger.info("Updating {} table read/write capacity".format(model.Meta.table_name))
                else:
                    # BUG FIX: the original branch tested table_capacity['write'] >= 4,
                    # keying the error off write capacity instead of the decrease count,
                    # so the "unable to decrease" error could be skipped or misfired.
                    logger.error("Unable to decrease capacity on {} table".format(model.Meta.table_name))
def test_connection_integration(ddb_url):
    """Exercise the low-level Connection API end to end against a local DynamoDB."""
    table_name = 'pynamodb-ci-connection'
    # For use with a fake dynamodb connection
    # See: http://aws.amazon.com/dynamodb/developer-resources/
    conn = Connection(host=ddb_url)
    print(conn)

    print("conn.describe_table...")
    table = None
    try:
        table = conn.describe_table(table_name)
    except TableDoesNotExist:
        # Table is missing: build the creation parameters piecewise for readability.
        attribute_definitions = [
            {'attribute_type': STRING, 'attribute_name': 'Forum'},
            {'attribute_type': STRING, 'attribute_name': 'Thread'},
            {'attribute_type': STRING, 'attribute_name': 'AltKey'},
            {'attribute_type': NUMBER, 'attribute_name': 'number'},
        ]
        key_schema = [
            {'key_type': HASH, 'attribute_name': 'Forum'},
            {'key_type': RANGE, 'attribute_name': 'Thread'},
        ]
        global_indexes = [{
            'index_name': 'alt-index',
            'key_schema': [{'KeyType': 'HASH', 'AttributeName': 'AltKey'}],
            'projection': {'ProjectionType': 'KEYS_ONLY'},
            'provisioned_throughput': {
                'ReadCapacityUnits': 1,
                'WriteCapacityUnits': 1,
            },
        }]
        local_indexes = [{
            'index_name': 'view-index',
            'key_schema': [
                {'KeyType': 'HASH', 'AttributeName': 'Forum'},
                {'KeyType': 'RANGE', 'AttributeName': 'AltKey'},
            ],
            'projection': {'ProjectionType': 'KEYS_ONLY'},
        }]
        params = {
            'read_capacity_units': 1,
            'write_capacity_units': 1,
            'attribute_definitions': attribute_definitions,
            'key_schema': key_schema,
            'global_secondary_indexes': global_indexes,
            'local_secondary_indexes': local_indexes,
        }
        print("conn.create_table...")
        conn.create_table(table_name, **params)

    # Poll until the table is visible, then until it leaves the CREATING state.
    while table is None:
        time.sleep(1)
        table = conn.describe_table(table_name)
    while table['TableStatus'] == 'CREATING':
        time.sleep(2)
        table = conn.describe_table(table_name)

    print("conn.list_tables")
    conn.list_tables()

    print("conn.update_table...")
    current_reads = table.get(PROVISIONED_THROUGHPUT).get(READ_CAPACITY_UNITS)
    conn.update_table(table_name, read_capacity_units=current_reads + 1, write_capacity_units=2)
    table = conn.describe_table(table_name)
    while table['TableStatus'] != 'ACTIVE':
        time.sleep(2)
        table = conn.describe_table(table_name)

    # Single-item round trip: put, get, delete.
    print("conn.put_item")
    conn.put_item(
        table_name,
        'item1-hash',
        range_key='item1-range',
        attributes={'foo': {'S': 'bar'}},
        condition=NotExists(Path('Forum')),
    )
    conn.get_item(table_name, 'item1-hash', range_key='item1-range')
    conn.delete_item(table_name, 'item1-hash', range_key='item1-range')

    # Batch round trip over ten synthetic threads in one forum.
    items = [{"Forum": "FooForum", "Thread": f"thread-{i}"} for i in range(10)]
    print("conn.batch_write_items...")
    conn.batch_write_item(table_name, put_items=items)
    print("conn.batch_get_items...")
    data = conn.batch_get_item(table_name, items)

    print("conn.query...")
    conn.query(
        table_name,
        "FooForum",
        range_key_condition=(BeginsWith(Path('Thread'), Value('thread'))),
    )
    print("conn.scan...")
    conn.scan(table_name)
    print("conn.delete_table...")
    conn.delete_table(table_name)
def test_update_table(self):
    """
    Connection.update_table
    """
    # Happy path: both capacity units produce a provisioned_throughput payload.
    with patch(PATCH_METHOD) as req:
        req.return_value = HttpOK(), None
        conn = Connection(self.region)
        expected = {
            'provisioned_throughput': {
                'WriteCapacityUnits': 2,
                'ReadCapacityUnits': 2
            },
            'table_name': 'ci-table'
        }
        conn.update_table(
            self.test_table_name,
            read_capacity_units=2,
            write_capacity_units=2
        )
        self.assertEqual(req.call_args[1], expected)
        # Supplying only one of the two capacity units is rejected up front.
        self.assertRaises(ValueError, conn.update_table, self.test_table_name, read_capacity_units=2)

    # A bad HTTP response from the backend surfaces as TableError.
    with patch(PATCH_METHOD) as req:
        req.return_value = HttpBadRequest(), None
        conn = Connection(self.region)
        self.assertRaises(
            TableError,
            conn.update_table,
            self.test_table_name,
            read_capacity_units=2,
            write_capacity_units=2)

    # GSI capacity updates are translated into an 'Update' action per index.
    with patch(PATCH_METHOD) as req:
        req.return_value = HttpOK(), None
        conn = Connection(self.region)
        gsi_updates = [
            {
                "index_name": "foo-index",
                "read_capacity_units": 2,
                "write_capacity_units": 2
            }
        ]
        expected = {
            'table_name': 'ci-table',
            'provisioned_throughput': {
                'ReadCapacityUnits': 2,
                'WriteCapacityUnits': 2,
            },
            'global_secondary_index_updates': [
                {
                    'Update': {
                        'IndexName': 'foo-index',
                        'ProvisionedThroughput': {
                            'ReadCapacityUnits': 2,
                            'WriteCapacityUnits': 2,
                        }
                    }
                }
            ]
        }
        conn.update_table(
            self.test_table_name,
            read_capacity_units=2,
            write_capacity_units=2,
            global_secondary_index_updates=gsi_updates
        )
        self.assertEqual(req.call_args[1], expected)
# By default, PynamoDB will connect to the us-east-1 region, but you can specify a different one. conn = Connection(region='us-west-1') # Modifying tables # You can easily list tables: conn.list_tables() # or delete a table: # conn.delete_table('Thread') # If you want to change the capacity of a table, that can be done as well: conn.update_table('Thread', read_capacity_units=20, write_capacity_units=20) # You can create tables as well, although the syntax is verbose. You should really use the model API instead, # but here is a low level example to demonstrate the point: kwargs = { 'write_capacity_units': 1, 'read_capacity_units': 1, 'attribute_definitions': [{ 'attribute_type': 'S', 'attribute_name': 'key1' }, { 'attribute_type': 'S', 'attribute_name': 'key2'
conn.create_table(table_name, **params) while table is None: time.sleep(2) table = conn.describe_table(table_name) while table['TableStatus'] == 'CREATING': time.sleep(2) table = conn.describe_table(table_name) print("conn.list_tables") conn.list_tables() print("conn.update_table...") conn.update_table( table_name, read_capacity_units=table.get(PROVISIONED_THROUGHPUT).get(READ_CAPACITY_UNITS) + 1, write_capacity_units=2 ) table = conn.describe_table(table_name) while table['TableStatus'] != 'ACTIVE': time.sleep(2) table = conn.describe_table(table_name) print("conn.put_item") conn.put_item( table_name, 'item1-hash', range_key='item1-range', attributes={'foo': {'S': 'bar'}},
print("conn.create_table...") conn.create_table(table_name, **params) while table is None: time.sleep(2) table = conn.describe_table(table_name) while table['TableStatus'] == 'CREATING': time.sleep(2) table = conn.describe_table(table_name) print("conn.list_tables") conn.list_tables() print("conn.update_table...") conn.update_table(table_name, read_capacity_units=table.get(PROVISIONED_THROUGHPUT).get( READ_CAPACITY_UNITS) + 1, write_capacity_units=2) table = conn.describe_table(table_name) while table['TableStatus'] != 'ACTIVE': time.sleep(2) table = conn.describe_table(table_name) print("conn.put_item") conn.put_item(table_name, 'item1-hash', range_key='item1-range', attributes={'foo': { 'S': 'bar' }},