def _alter(self, tree):
    """Execute a parsed ALTER statement.

    Dispatches on which clause the parse tree carries: a throughput
    change, a DROP INDEX, or a CREATE GLOBAL INDEX.  Raises
    SyntaxError when none of those clauses is present.
    """
    if tree.throughput:
        read, write = tree.throughput
        target_index = tree.index if tree.index else None
        self._update_throughput(tree.table, read, write, target_index)
    elif tree.drop_index:
        pending = [IndexUpdate.delete(tree.drop_index[0])]
        try:
            self.connection.update_table(tree.table, index_updates=pending)
        except DynamoDBError as err:
            # "IF EXISTS" swallows only the missing-index error.
            if not (tree.exists and err.kwargs["Code"] == "ResourceNotFoundException"):
                raise
    elif tree.create_index:
        # Build a GlobalIndex from the parse tree and attach it.
        attrs = {}
        new_index = self._parse_global_index(tree.create_index, attrs)
        pending = [IndexUpdate.create(new_index)]
        try:
            self.connection.update_table(tree.table, index_updates=pending)
        except DynamoDBError as err:
            # "IF NOT EXISTS" swallows only the duplicate-index error.
            if not (
                tree.not_exists
                and err.kwargs["Code"] == "ValidationException"
                and "already exists" in err.kwargs["Message"]
            ):
                raise
    else:
        raise SyntaxError("No alter command found")
def _alter(self, tree):
    """Run an ALTER statement from its parse tree.

    Exactly one of three clauses is handled: SET THROUGHPUT,
    DROP INDEX, or CREATE GLOBAL INDEX; anything else is a
    SyntaxError.
    """
    if tree.throughput:
        # Throughput change, optionally scoped to a named index.
        read_cap, write_cap = tree.throughput
        idx_name = None
        if tree.index:
            idx_name = tree.index
        self._update_throughput(tree.table, read_cap, write_cap, idx_name)
    elif tree.drop_index:
        try:
            self.connection.update_table(
                tree.table,
                index_updates=[IndexUpdate.delete(tree.drop_index[0])],
            )
        except DynamoDBError as exc:
            if tree.exists and exc.kwargs["Code"] == "ResourceNotFoundException":
                # DROP INDEX IF EXISTS: ignore a missing index.
                pass
            else:
                raise
    elif tree.create_index:
        # Parse the GlobalIndex definition out of the tree.
        attrs = {}
        gindex = self._parse_global_index(tree.create_index, attrs)
        try:
            self.connection.update_table(
                tree.table,
                index_updates=[IndexUpdate.create(gindex)],
            )
        except DynamoDBError as exc:
            if (
                tree.not_exists
                and exc.kwargs["Code"] == "ValidationException"
                and "already exists" in exc.kwargs["Message"]
            ):
                # CREATE ... IF NOT EXISTS: ignore a duplicate index.
                pass
            else:
                raise
    else:
        raise SyntaxError("No alter command found")
def test_create_index(self):
    """ Adding a global index via update_table should show up in describe """
    pkey = DynamoKey('id', data_type=STRING)
    self.dynamo.create_table('foobar', hash_key=pkey)
    name_field = DynamoKey('name')
    gindex = GlobalIndex.all('name-index', name_field, pkey)
    updates = [IndexUpdate.create(gindex)]
    self.dynamo.update_table('foobar', index_updates=updates)
    desc = self.dynamo.describe_table('foobar')
    self.assertEqual(len(desc.global_indexes), 1)
def test_create_index(self):
    """ update_table with an IndexUpdate.create adds one global index """
    table_hash = DynamoKey('id', data_type=STRING)
    self.dynamo.create_table('foobar', hash_key=table_hash)
    # Index projects all attributes, keyed on 'name' with 'id' as range.
    idx = GlobalIndex.all('name-index', DynamoKey('name'), table_hash)
    self.dynamo.update_table(
        'foobar',
        index_updates=[IndexUpdate.create(idx)],
    )
    described = self.dynamo.describe_table('foobar')
    self.assertEqual(len(described.global_indexes), 1)
def test_create_index(self):
    """Creating a global index is reflected by describe_table"""
    key = DynamoKey("id", data_type=STRING)
    self.dynamo.create_table("foobar", hash_key=key)
    name_key = DynamoKey("name")
    new_index = GlobalIndex.all("name-index", name_key, key)
    self.dynamo.update_table("foobar", index_updates=[IndexUpdate.create(new_index)])
    table = self.dynamo.describe_table("foobar")
    # Guard against describe_table returning None before indexing into it.
    assert table is not None
    self.assertEqual(len(table.global_indexes), 1)
def update_dynamo_schema(self, connection, test=False, wait=False,
                         throughput=None, namespace=()):
    """
    Updates all Dynamo table global indexes for this model

    Parameters
    ----------
    connection : :class:`~dynamo3.DynamoDBConnection`
    test : bool, optional
        If True, don't actually create the table (default False)
    wait : bool, optional
        If True, block until table has been created (default False)
    throughput : dict, optional
        The throughput of the table and global indexes. Has the keys 'read'
        and 'write'. To specify throughput for global indexes, add the name
        of the index as a key and another 'read', 'write' dict as the value.
    namespace : str or tuple, optional
        The namespace of the table

    Returns
    -------
    table : str
        Table name that altered, or None if nothing altered

    """
    if self.abstract:
        return None
    tablename = self.ddb_tablename(namespace)

    # Build the set of indexes the model declares, applying any
    # per-index throughput overrides.
    desired = []
    for gindex in self.global_indexes:
        idx = gindex.get_ddb_index(self.fields)
        if throughput is not None and gindex.name in throughput:
            idx.throughput = Throughput(**throughput[gindex.name])
        desired.append(idx)
    if not desired:
        return None
    table = connection.describe_table(tablename)
    if not table:
        return None

    expected = {idx.name: idx for idx in desired}
    actual = {idx.name: idx for idx in table.global_indexes}

    # Indexes declared on the model but absent from the live table.
    updates = [
        IndexUpdate.create(expected[name])
        for name in set(expected) - set(actual)
    ]
    # Indexes present on both sides whose throughput drifted.
    for name in set(expected) & set(actual):
        if actual[name].throughput != expected[name].throughput:
            updates.append(IndexUpdate.update(name, expected[name].throughput))

    if not updates:
        return None
    if not test:
        connection.update_table(tablename, index_updates=updates)
        if wait:
            # Poll until the table settles back into ACTIVE.
            desc = connection.describe_table(tablename)
            while desc.status != 'ACTIVE':
                time.sleep(1)
                desc = connection.describe_table(tablename)
    return tablename
def update_dynamo_schema(self, connection, test=False, wait=False,
                         throughput=None, namespace=()):
    """
    Updates all Dynamo table global indexes for this model

    Parameters
    ----------
    connection : :class:`~dynamo3.DynamoDBConnection`
    test : bool, optional
        If True, don't actually create the table (default False)
    wait : bool, optional
        If True, block until table has been created (default False)
    throughput : dict, optional
        The throughput of the table and global indexes. Has the keys 'read'
        and 'write'. To specify throughput for global indexes, add the name
        of the index as a key and another 'read', 'write' dict as the value.
    namespace : str or tuple, optional
        The namespace of the table

    Returns
    -------
    table : str
        Table name that altered, or None if nothing altered

    """
    if self.abstract:
        return None
    tablename = self.ddb_tablename(namespace)

    model_indexes = []
    for gindex in self.global_indexes:
        ddb_index = gindex.get_ddb_index(self.fields)
        # Per-index throughput override, keyed by index name.
        if throughput is not None and gindex.name in throughput:
            ddb_index.throughput = Throughput(**throughput[gindex.name])
        model_indexes.append(ddb_index)
    if not model_indexes:
        return None
    table = connection.describe_table(tablename)
    if not table:
        return None

    wanted = {}
    for ddb_index in model_indexes:
        wanted[ddb_index.name] = ddb_index
    live = {}
    for ddb_index in table.global_indexes:
        live[ddb_index.name] = ddb_index

    wanted_names = set(wanted.keys())
    live_names = set(live.keys())

    # Create indexes the model has but the table lacks.
    updates = []
    for name in wanted_names - live_names:
        updates.append(IndexUpdate.create(wanted[name]))
    # Sync throughput on indexes that exist in both places.
    for name in wanted_names & live_names:
        if live[name].throughput != wanted[name].throughput:
            updates.append(IndexUpdate.update(name, wanted[name].throughput))

    if not updates:
        return None
    if not test:
        connection.update_table(tablename, index_updates=updates)
        if wait:
            # Block until the table returns to ACTIVE after the update.
            desc = connection.describe_table(tablename)
            while desc.status != 'ACTIVE':
                time.sleep(1)
                desc = connection.describe_table(tablename)
    return tablename