def make_multi(populate, group_by):
    """Build a table store with one table keyed on a combined three-field primary key.

    The table is marked row-as-file with the given ``group_by`` setting.
    When ``populate`` is true, two rows sharing key1/key2 but differing in
    key3 are inserted.
    """
    store = TableStore()
    multikey = store.add_table('multikey')
    multikey.add_primary_key('key1,key2,key3')
    multikey.set_row_as_file(group_by=group_by)
    if populate:
        # Two rows that differ only in the last key component.
        for third_key in (1, 2):
            multikey.add({'key1': 1, 'key2': 1, 'key3': third_key})
    return store
def test_schema(self):
    """Verify jsonschema enforcement on both regular tables and single-row tables.

    Covers: 'required' property check, 'pattern' check, and that
    add_default_values() can satisfy a required property.
    """
    ts = TableStore()
    table = ts.add_table('test-table')
    table.add_primary_key('id')
    table.add_schema({
        'type': 'object',
        'properties': {
            'a_string': {'type': 'string'},
            'a_pattern': {'pattern': r'^([a-z\d-]){1,25}$'},
        },
        'required': ['a_pattern'],
    })

    # Check 'required' rule.
    with self.assertRaises(jsonschema.ValidationError) as context:
        table.add({'id': 123})
    self.assertIn("'a_pattern' is a required property", str(context.exception))

    # Check 'pattern' rule.
    with self.assertRaises(jsonschema.ValidationError) as context:
        table.add({'id': 123, 'a_pattern': 'not conforming'})
    self.assertIn("Schema check failed", str(context.exception))

    # Define a default value for the required property and make sure we pass.
    table.add_default_values({'a_pattern': 'some-value'})
    table.add({'id': 123})

    # Test schema on document (single-row table).
    single = ts.add_table('single', single_row=True)
    single.add_schema({
        'type': 'object',
        'properties': {
            'a_string': {'type': 'string'},
            'a_pattern': {'pattern': r'^([a-z\d-]){1,25}$'},
        },
        'required': ['a_pattern'],
    })
    doc = single.add({'a_string': 'aaa', 'a_pattern': 'abc'})

    # Check 'pattern' rule on the single-row document as well.
    with self.assertRaises(jsonschema.ValidationError) as context:
        doc = single.add({'a_pattern': 'not conforming'})
    self.assertIn("Schema check failed", str(context.exception))
def make_store(populate, row_as_file=None):
    """Return a table store with two tables and populate if needed."""
    ts = TableStore()

    # Set up two tables using single field constraints.
    continents = ts.add_table('continents')
    continents.add_primary_key('continent_id')
    continents.add_unique_constraint('name')

    countries = ts.add_table('countries')
    countries.add_primary_key('country_code')
    countries.add_unique_constraint('name')
    countries.add_foreign_key('continent_id', 'continents')

    if row_as_file:
        countries.set_row_as_file(subfolder_name=countries.name)

    if populate:
        # Not a complete list of continents and its countries.
        for continent_id, continent_name in [
            (1, 'Africa'),
            (2, 'Asia'),
            (3, 'Europe'),
        ]:
            continents.add({'continent_id': continent_id, 'name': continent_name})

        for code, country_name, continent_id in [
            ('sd', 'Sudan', 1),
            ('ke', 'Kenya', 1),
            ('gn', 'Guynea', 1),
            ('jp', 'Japan', 2),
            ('vn', 'Vietnam', 2),
            ('is', 'Iceland', 3),
        ]:
            countries.add({
                'country_code': code,
                'name': country_name,
                'continent_id': continent_id,
            })

    return ts
def test_constraints(self):
    """Exercise primary key, unique, and foreign key constraints.

    Covers single-field constraints, combined (multi-field) keys,
    default-value immutability, foreign row lookup, and the single-row
    table variant. The statement order matters: constraints are verified
    in the order they are added to a table.
    """
    ts = TableStore()

    # Test badly formatted table name
    with self.assertRaises(TableError) as context:
        ts.add_table('bad name')
    self.assertIn("didn't match pattern", str(context.exception))

    # Set up two tables using single field constraints
    table1 = ts.add_table('table1')
    table1.add_primary_key('pk_field')
    table1.add_unique_constraint('unique_field')

    # Note, constraints are verified in the order they are added to the table.
    # Test missing primary key field
    with self.assertRaises(ConstraintError) as context:
        table1.add({'bogus_field': 'dummy'})

    # Test successfull row inserts
    row1 = table1.add({'pk_field': 1, 'unique_field': 'iamunique1', 'tag': 'red'})
    row2 = table1.add({'pk_field': 2, 'unique_field': 'iamunique2', 'tag': 'blue'})

    # Test default values and their immutability. The table must deep-copy
    # the defaults so later mutation of the caller's objects has no effect.
    mutable = [{'a_list_item': 1}]
    default_values = {'default_value': 'some_value', 'list': mutable}
    table1.add_default_values(default_values)
    # Mutate it a bit
    del default_values['default_value']
    mutable.append('this should not appear')
    row3 = table1.add({'pk_field': 3, 'unique_field': 'iamunique3', 'tag': 'blue'})
    self.assertEqual(row3.get('default_value'), 'some_value')
    self.assertEqual(row3.get('list'), [{'a_list_item': 1}])

    # Test badly formatted primary key
    with self.assertRaises(ConstraintError) as context:
        table1.add({'pk_field': 'no good', 'unique_field': 'x'})  # Can't have spaces
    self.assertIn("didn't match pattern", str(context.exception))

    # Test primary key violation
    with self.assertRaises(ConstraintError) as context:
        table1.add({'pk_field': 1, 'unique_field': 'somethingelse'})
    self.assertIn("Primary key violation", str(context.exception))

    # Test unique constraint violation
    with self.assertRaises(ConstraintError) as context:
        table1.add({'pk_field': 4, 'unique_field': 'iamunique1'})
    self.assertIn("Unique constraint violation", str(context.exception))

    # Test bad lookup
    with self.assertRaises(TableError) as context:
        table1.get({'not_a_pk_field': 1})
    self.assertIn("can't make primary key", str(context.exception))

    # Test lookup using primary key
    self.assertIs(row1, table1.get({'pk_field': 1}))
    self.assertIs(row2, table1.get({'pk_field': 2}))
    self.assertIs(row3, table1.get({'pk_field': 3}))

    # Test lookup using the row itself
    self.assertIs(row1, table1.get(row1))
    self.assertIs(row2, table1.get(row2))
    self.assertIs(row3, table1.get(row3))

    # Test lookup using search criteria for the whole row.
    self.assertEqual([row1], table1.find(row1))
    self.assertEqual([row2], table1.find(row2))
    self.assertEqual([row3], table1.find(row3))

    # Test lookup using no criteria. Should return all rows.
    self.assertItemsEqual([row1, row2, row3], table1.find())

    # Test lookup using unique field as search criteria
    self.assertEqual([row1], table1.find({'unique_field': 'iamunique1'}))
    self.assertEqual([row2], table1.find({'unique_field': 'iamunique2'}))
    self.assertEqual([row3], table1.find({'unique_field': 'iamunique3'}))

    # Test lookup on non-distinct fields
    self.assertEqual([row1], table1.find({'tag': 'red'}))
    self.assertItemsEqual([row2, row3], table1.find({'tag': 'blue'}))

    # Test foreign key relationship. Previously inserted rows are not re-checked
    # automatically.
    table2 = ts.add_table('table2')
    table2.add_primary_key('pk_field')
    table1.add_foreign_key('tag', 'table2', 'pk_field')
    blue_row = table2.add({'pk_field': 'blue'})

    # Test adding bogus foreign key relationship.
    with self.assertRaises(ConstraintError) as context:
        table1.add_foreign_key('bogus_field', 'table2')  # Field name should not match
    with self.assertRaises(ConstraintError) as context:
        table1.add_foreign_key('bogus_field', 'table2', 'still_bogus_field')

    # Test adding row with foreign key check
    table1.add({'pk_field': 4, 'unique_field': 'iamunique4', 'tag': 'blue'})

    # Test adding row with foreign key violation
    with self.assertRaises(ConstraintError) as context:
        table1.add({'pk_field': 5, 'unique_field': 'iamunique5', 'tag': 'burgundy'})
    self.assertIn("foreign key record in 'table2' not found", str(context.exception))

    # Get foreign table row. Only 'blue' row is linked. The 'red' one is orphaned.
    self.assertEqual(None, table1.get_foreign_row(row1, 'table2'))
    self.assertEqual(blue_row, table1.get_foreign_row(row2, 'table2'))
    self.assertEqual(blue_row, table1.get_foreign_row(row3, 'table2'))

    # Test bad table name in request for foreign row
    with self.assertRaises(TableError) as context:
        table1.get_foreign_row(row1, 'no_table')
    self.assertIn("No foreign key relationship found", str(context.exception))

    # Test combined keys and single row table.
    table3 = ts.add_table('table3')
    table3.add_primary_key('pk_field1,pk_field2')
    table3.add_unique_constraint('unique_field1,unique_field2')
    # Test foreign key relationship on own table, and with different ordered field names.
    table3.add_foreign_key('foreign_field1,foreign_field2', 'table3',
                           'unique_field2,unique_field1')

    # Test row inserts
    row1 = table3.add({
        'pk_field1': 1, 'pk_field2': 'x',
        'unique_field1': 'u1', 'unique_field2': 'x'
    })

    # Test unique fields constraint
    with self.assertRaises(ConstraintError) as context:
        row2 = table3.add({
            'pk_field1': 2, 'pk_field2': 'x',
            'unique_field1': 'u1', 'unique_field2': 'x'
        })

    # Test foreign key. Link to self.
    row2 = table3.add({
        'pk_field1': 2, 'pk_field2': 'x',
        'unique_field1': 'u2', 'unique_field2': 'x',
        'foreign_field1': 'u1', 'foreign_field2': 'x',
    })

    # Test foreign key violation.
    with self.assertRaises(ConstraintError) as context:
        row2 = table3.add({
            'pk_field1': 3, 'pk_field2': 'x',
            'unique_field1': 'u3', 'unique_field2': 'x',
            'foreign_field1': 'bork', 'foreign_field2': 'x',
        })
    self.assertIn("foreign key record in 'table3' not found", str(context.exception))

    # Test all cases with a single row table
    single = ts.add_table('single', single_row=True)
    #single.add_primary_key('pk_field1,pk_field2')
    single.add_unique_constraint('unique_field1,unique_field2')
    # Test foreign key relationship on own table, and with different ordered field names.
    single.add_foreign_key('foreign_field1,foreign_field2', 'table3',
                           'unique_field2,unique_field1')

    # Test doc insert and remove
    doc = single.add({
        'pk_field1': 1, 'pk_field2': 'x',
        'unique_field1': 'u1', 'unique_field2': 'x'
    })
    single.remove(doc)

    # Test foreign key. Link to self.
    doc = single.add({
        'pk_field1': 1, 'pk_field2': 'x',
        'unique_field1': 'u2', 'unique_field2': 'x',
        'foreign_field1': 'u1', 'foreign_field2': 'x',
    })

    # Test foreign key violation.
    with self.assertRaises(ConstraintError) as context:
        doc = single.add({
            'pk_field1': 3, 'pk_field2': 'x',
            'unique_field1': 'u3', 'unique_field2': 'x',
            'foreign_field1': 'bork', 'foreign_field2': 'x',
        })
    self.assertIn("foreign key record in 'table3' not found", str(context.exception))

    # Clear table store
    ts.clear()
def test_tablestore_definition(self):
    """Round-trip the table store meta-data through get_definition()."""
    original_store = make_store(populate=False)
    serialized = original_store.get_definition()
    rebuilt = TableStore()
    rebuilt.init_from_definition(serialized)
def test_find_references(self):
    """Verify Table.find_references() across a three-level master/detail chain.

    Fix: removed a dead ``ts = TableStore()`` assignment that was
    immediately overwritten by ``make_store(populate=True)``.
    """
    # Make three tables which all have master-detail relationship and use
    # aliased combined foreign keys for extra points.
    # Master table contains a combinded primary key, master_id2 and master_id2.
    # Middle table has foreign key relationship to master table primary keys but with
    # aliased field names.
    # Detail table has foreign key relationship to middle table using a unique field
    # in the middle table, as opposed to the primary key. It also has a reference to
    # itself.
    ts = make_store(populate=True)

    t1 = ts.add_table('master')
    t1.add_primary_key('master_id1,master_id2')
    t1r1 = t1.add({'master_id1': 1, 'master_id2': 'a'})  # Has two 'middle' rows referencing it.
    t1r2 = t1.add({'master_id1': 2, 'master_id2': 'b'})  # Has one 'middle' row references to it.
    t1r3 = t1.add({'master_id1': 3, 'master_id2': 'c'})  # Has no foreign references to it.

    t2 = ts.add_table('middle')
    t2.add_primary_key('middle_id')
    t2.add_foreign_key('m1,m2', 'master', 'master_id1,master_id2')
    t2.add_unique_constraint('middle_unique_id')
    t2r1 = t2.add({'middle_id': 51, 'm1': 1, 'm2': 'a', 'middle_unique_id': 'unique_51'})  # Has two 'detail' row refs.
    t2r2 = t2.add({'middle_id': 52, 'm1': 1, 'm2': 'a', 'middle_unique_id': 'unique_52'})  # Has two 'detail' row refs.
    t2r3 = t2.add({'middle_id': 53, 'm1': 2, 'm2': 'b', 'middle_unique_id': 'unique_53'})  # Has two 'detail' row refs.

    t3 = ts.add_table('detail')
    t3.add_primary_key('detail_id')
    t3.add_foreign_key('middle_unique_id', 'middle')
    t3.add_foreign_key('other_detail_id', 'detail', 'detail_id')
    # This one references unique field "middle.unique_id=51" as well as itself
    t3r1 = t3.add({'detail_id': 100, 'middle_unique_id': 'unique_51', 'other_detail_id': 100})
    # This one references unique field "middle.unique_id=51" as well as the row above.
    t3r2 = t3.add({'detail_id': 101, 'middle_unique_id': 'unique_51', 'other_detail_id': 100})
    # This one references unique field "middle.unique_id=53" but the row above as well.
    t3r3 = t3.add({'detail_id': 102, 'middle_unique_id': 'unique_53', 'other_detail_id': 101})
    # These ones reference unique field "middle.unique_id=53" and nothing else, and are
    # the only one that survive the cascading delete.
    t3r4 = t3.add({'detail_id': 103, 'middle_unique_id': 'unique_53'})
    t3r5 = t3.add({'detail_id': 104, 'middle_unique_id': 'unique_53'})

    ts.check_integrity()

    result = t1.find_references(t1r1)

    # Only rows from 'middle' and 'detail' tables should be expected
    self.assertItemsEqual(result.keys(), ['middle', 'detail'])

    # First two of three rows in 'middle' should be expected.
    self.assertEqual(len(result['middle']), 2)
    self.assertIn(t2r1, result['middle'])
    self.assertIn(t2r2, result['middle'])
    self.assertNotIn(t2r3, result['middle'])

    # First three of five rows in 'detail' should be expected.
    self.assertEqual(len(result['detail']), 3)
    self.assertIn(t3r1, result['detail'])
    self.assertIn(t3r2, result['detail'])
    self.assertIn(t3r3, result['detail'])
    self.assertNotIn(t3r4, result['detail'])
    self.assertNotIn(t3r5, result['detail'])

    # Delete top row and expect problems
    t1.remove(t1r1)
    with self.assertRaises(TableError):
        ts.check_integrity()

    # Do cascading delete and expect success.
    for table_name, rows in result.items():
        for row in rows:
            ts.get_table(table_name).remove(row)
    ts.check_integrity()
def get_drift_table_store():
    """
    Create a Data Store which contains all Core Drift tables.

    Fixes applied in review:
    - The 'deployable_name' foreign key in the access-roles section was
      mistakenly added to the 'platforms' table; it now goes on
      'access-roles' as the spec comment and schema require.
    - Removed a duplicate 'access_key' entry in the 'users' schema dict
      (the second literal silently overwrote the first).
    - Corrected the 'metrics' spec comment, which described the wrong
      key fields.
    """
    # RULE: pk='tier_name'='LIVENORTH', role['liveops', 'admin', 'service']
    ts = TableStore()

    domain = ts.add_table('domain', single_row=True)
    domain.add_schema({
        'type': 'object',
        'properties': {
            'domain_name': {'type': 'string'},
            'display_name': {'type': 'string'},
            'origin': {'type': 'string'},
        },
        'required': ['domain_name', 'origin'],
    })
    domain.add_default_values({'domain_name': '', 'origin': ''})

    organizations = ts.add_table('organizations')
    organizations.add_primary_key('organization_name')
    organizations.add_unique_constraint('short_name')
    organizations.add_schema({
        'type': 'object',
        'properties': {
            'organization_name': {'pattern': r'^([a-z0-9]){2,20}$'},
            'short_name': {'pattern': r'^([a-z0-9]){2,20}$'},
            'display_name': {'type': 'string'},
            'state': {'enum': ['initializing', 'active', 'disabled', 'deleted']},
        },
        'required': ['short_name'],
    })
    organizations.add_default_values({'state': 'active'})

    tiers = ts.add_table('tiers')
    tiers.add_primary_key('tier_name')
    tiers.add_schema({
        'type': 'object',
        'properties': {
            'tier_name': {'pattern': r'^([A-Z]){3,20}$'},
            'is_live': {'type': 'boolean'},
            'state': {'enum': ['initializing', 'active', 'disabled', 'deleted']},
        },
        'required': ['is_live'],
    })
    tiers.add_default_values({'is_live': True, 'state': 'active'})

    deployable_names = ts.add_table('deployable-names')
    deployable_names.add_primary_key('deployable_name')
    deployable_names.add_schema({
        'type': 'object',
        'properties': {
            'deployable_name': {'pattern': r'^([a-z-]){3,20}$'},
            'display_name': {'type': 'string'},
            'tags': {'type': 'array', 'items': {'type': 'string'}},
        },
        'required': ['display_name', 'tags'],
    })
    deployable_names.add_default_values({'tags': []})

    deployables = ts.add_table('deployables')
    deployables.add_primary_key('tier_name,deployable_name')
    deployables.add_foreign_key('tier_name', 'tiers')
    deployables.add_foreign_key('deployable_name', 'deployable-names')
    deployables.add_schema({
        'type': 'object',
        'properties': {
            'release': {'type': 'string'},
            'is_active': {'type': 'boolean'},
            'reason_inactive': {'type': 'string'},
        },
        'required': ['is_active'],
    })
    deployables.add_default_values({'is_active': False})

    products = ts.add_table('products')
    products.add_primary_key('product_name')
    products.add_foreign_key('organization_name', 'organizations')
    products.add_schema({
        'type': 'object',
        'properties': {
            'product_name': {'pattern': r'^([a-z0-9-]){3,35}$'},
            'state': {'enum': ['initializing', 'active', 'disabled', 'deleted']},
            'deployables': {'type': 'array', 'items': {'type': 'string'}},
        },
        'required': ['organization_name', 'deployables'],
    })
    products.add_default_values({'state': 'active', 'deployables': []})

    # Waiting a little bit with fixups of tenant-names and tenants table schema.
    # NOTE: the condition is a deliberately always-true toggle; the 'else'
    # branch holds the planned replacement layout.
    if "temporary fixy fix":
        tenant_names = ts.add_table('tenant-names')
        tenant_names.add_primary_key('tenant_name')
        tenant_names.add_foreign_key('product_name', 'products')
        tenant_names.add_foreign_key('organization_name', 'organizations')
        tenant_names.add_foreign_key('tier_name', 'tiers')
        tenant_names.add_unique_constraint('alias')
        tenant_names.add_schema({
            'type': 'object',
            'properties': {
                'tenant_name': {'pattern': r'^([a-z0-9-]){3,30}$'},
                'alias': {'pattern': r'^([a-z0-9-]){3,30}$'},
                'reserved_at': {'format': 'date-time'},
                'reserved_by': {'type': 'string'},
            },
            'required': ['product_name', 'organization_name', 'tier_name'],
        })
        tenant_names.add_default_values({'reserved_at': '@@utcnow'})

        tenants = ts.add_table('tenants')
        tenants.add_primary_key('tier_name,deployable_name,tenant_name')
        tenants.set_row_as_file(subfolder_name=tenants.name,
                                group_by='tier_name,tenant_name')
        tenants.add_foreign_key('tier_name', 'tiers')
        tenants.add_foreign_key('deployable_name', 'deployable-names')
        tenants.add_foreign_key('tenant_name', 'tenant-names')
        tenants.add_schema({
            'type': 'object',
            'properties': {
                'state': {
                    'enum': [
                        'initializing', 'active', 'disabled', 'uninitializing',
                        'deleted'
                    ]
                },
            },
        })
        tenants.add_default_values({'state': 'initializing'})
    else:
        tenant_names = ts.add_table('tenant-names')
        tenant_names.add_primary_key('tenant_name,product_name')
        tenant_names.set_row_as_file(subfolder_name='tenants',
                                     group_by='product_name')
        tenant_names.add_unique_constraint('tenant_name')
        tenant_names.add_foreign_key('product_name', 'products')
        tenant_names.add_foreign_key('organization_name', 'organizations')
        tenant_names.add_foreign_key('tier_name', 'tiers')
        tenant_names.add_schema({
            'type': 'object',
            'properties': {
                'tenant_name': {'pattern': r'^([a-z0-9-]){3,30}$'},
                'reserved_at': {'format': 'date-time'},
                'reserved_by': {'type': 'string'},
            },
            'required': ['product_name', 'organization_name', 'tier_name'],
        })
        tenant_names.add_default_values({'reserved_at': '@@utcnow'})

        tenants = ts.add_table('tenants')
        tenants.add_primary_key('deployable_name,tenant_name')
        tenants.set_row_as_file(subfolder_name='tenants', group_by='tenant_name')
        tenants.add_foreign_key('deployable_name', 'deployable-names')
        tenants.add_foreign_key('tenant_name', 'tenant-names')
        tenants.add_schema({
            'type': 'object',
            'properties': {
                'state': {
                    'enum': [
                        'initializing', 'active', 'disabled', 'uninitializing',
                        'deleted',
                    ]
                },
            },
        })
        tenants.add_default_values({'state': 'initializing'})

    public_keys = ts.add_table('public-keys')
    public_keys.set_row_as_file(subfolder_name='authentication')
    public_keys.add_primary_key('tier_name,deployable_name')
    public_keys.add_foreign_key('tier_name', 'tiers')
    public_keys.add_foreign_key('deployable_name', 'deployable-names')
    public_keys.add_schema({
        'type': 'object',
        'properties': {
            'keys': {
                'type': 'array',
                'items': {
                    'type': 'object',
                    'properties': {
                        'issued': {'format': 'date-time'},
                        'expires': {'format': 'date-time'},
                        'public_key': {'type': 'string'},
                        'private_key': {'type': 'string'},
                    },
                }
            },
        },
    })

    platforms = ts.add_table('platforms')
    platforms.set_row_as_file(subfolder_name='authentication')
    platforms.add_primary_key('product_name,provider_name')
    platforms.add_foreign_key('product_name', 'products')
    platforms.add_schema({
        'type': 'object',
        'properties': {
            'provider_details': {'type': 'object'},
        },
        'required': ['provider_details'],
    })

    '''
    users:
        organization_name   string, pk, fk->organizations, required
        user_name           string, pk, required
        create_date         datetime, required, default=@@utcnow
        valid_until         datetime
        is_active           boolean, required, default=true
        password            string
        access_key          string
        is_service          boolean, required, default=false
        is_role_admin       boolean, required, default=false
        meta                subfolder_name=authentication
    '''
    users = ts.add_table('users')
    users.set_row_as_file(subfolder_name='authentication')
    users.add_primary_key('organization_name,user_name')
    users.add_foreign_key('organization_name', 'organizations')
    users.add_schema({
        'type': 'object',
        'properties': {
            'user_name': {'pattern': r'^([a-z0-9_]){2,30}$'},
            'create_date': {'format': 'date-time'},
            'valid_until': {'format': 'date-time'},
            'is_active': {'type': 'boolean'},
            'password': {'type': 'string'},
            'access_key': {'type': 'string'},
            'is_service': {'type': 'boolean'},
            'is_role_admin': {'type': 'boolean'},
        },
        'required': ['create_date', 'is_active', 'is_service', 'is_role_admin'],
    })
    users.add_default_values({
        'create_date': '@@utcnow',
        'is_active': True,
        'is_service': False,
        'is_role_admin': False
    })

    '''
    # dynamically populated by deployables during "init" phase
    access-roles:
        role_name           string, pk
        deployable_name     string, fk->deployables, required
        description         string
    '''
    access_roles = ts.add_table('access-roles')
    access_roles.set_row_as_file(subfolder_name='authentication')
    access_roles.add_primary_key('role_name')
    # Fixed: this foreign key belongs to 'access-roles', not 'platforms'.
    access_roles.add_foreign_key('deployable_name', 'deployable-names')
    access_roles.add_schema({
        'type': 'object',
        'properties': {
            'description': {'type': 'string'},
        },
        'required': ['deployable_name'],
    })

    '''
    users-acl:
        organization_name   string, pk, fk->organizations, required
        user_name           string, pk, fk->users, required
        role_name           string, pk, fk->user-roles, required
        tenant_name         string, fk->tenants
    '''
    users_acl = ts.add_table('users-acl')
    users_acl.set_row_as_file(subfolder_name='authentication')
    users_acl.add_primary_key('organization_name,user_name,role_name')
    users_acl.add_foreign_key('organization_name', 'organizations')
    users_acl.add_foreign_key('user_name', 'users')
    users_acl.add_foreign_key('role_name', 'access-roles')
    users_acl.add_foreign_key('tenant_name', 'tenant-names')

    # RELEASE MANAGEMENT - THIS SHOULDN'T REALLY BE IN THIS FILE HERE, or what?
    '''
    instances:
        tier_name           string, pk, fk->tiers
        deployable_name     string, pk, fk->deployables
        autoscaling
            min             integer
            max             integer
            desired         integer
            instance_type   string, required
        release_version     string
    '''
    instances = ts.add_table('instances')
    instances.set_subfolder_name('release-mgmt')
    instances.add_primary_key('tier_name,deployable_name')
    instances.add_foreign_key('tier_name', 'tiers')
    instances.add_foreign_key('deployable_name', 'deployable-names')
    instances.add_schema({
        'type': 'object',
        'properties': {
            'autoscaling': {
                'type': 'object',
                'properties': {
                    'min': {'type': 'integer'},
                    'max': {'type': 'integer'},
                    'desired': {'type': 'integer'},
                    'instance_type': {'type': 'string'},
                }
            },
            'release_version': {'type': 'string'},
        }
    })

    # API ROUTER STUFF - THIS SHOULDN'T REALLY BE IN THIS FILE HERE
    '''
    nginx:
        tier_name           string, pk, fk->tiers
    '''
    nginx = ts.add_table('nginx')
    nginx.add_primary_key('tier_name')
    nginx.set_subfolder_name('api-router')
    nginx.add_schema({
        'type': 'object',
        'properties': {
            'worker_rlimit_nofile': {'type': 'integer'},
            'worker_connections': {'type': 'integer'},
            'api_key_passthrough': {
                'type': 'array',
                'items': {
                    'type': 'object',
                    'properties': {
                        'key_name': {'type': 'string'},
                        'key_value': {'type': 'string'},
                        'product_name': {'type': 'string'},
                    },
                    'required': ['key_name', 'key_value', 'product_name'],
                }
            },
        }
    })
    '''
    a yaml representation would be:
    ---
    type: object
    properties:
        worker_connections: {type: integer}
        worker_rlimit_nofile: {type: integer}
        api_key_passthrough:
            type: array
            items:
                type: object
                required: [key_name, key_value ,product_name]
                properties:
                    key_name: {type: string}
                    key_value: {type: string}
                    product_name: {type: string}
    '''

    '''
    routing:
        deployable_name     string, pk, fk->deployables
        api                 string, required
    '''
    routing = ts.add_table('routing')
    routing.set_subfolder_name('api-router')
    routing.add_primary_key('tier_name,deployable_name')
    routing.add_foreign_key('deployable_name', 'deployable-names')
    routing.add_schema({
        'type': 'object',
        'properties': {
            'api': {'type': 'string'},
            'requires_api_key': {'type': 'boolean'},
        },
        'required': ['requires_api_key'],
    })
    routing.add_default_values({'requires_api_key': False})

    '''
    api-keys:
        api_key_name        string, pk
        product_name        string, fk->products, required
        in_use              boolean, default=true, required
        create_date         datetime, default=@@utcnow
        key_type            enum product|custom, default=product, required
        custom_data         string
    '''
    keys = ts.add_table('api-keys')
    keys.set_subfolder_name('api-router')
    keys.add_primary_key('api_key_name')
    keys.add_foreign_key('product_name', 'products')
    keys.add_schema({
        'type': 'object',
        'properties': {
            'in_use': {'type': 'boolean'},
            'create_date': {'format': 'date-time'},
            'key_type': {'enum': ['product', 'custom']},
            'custom_data': {'type': 'string'},
        },
        'required': ['in_use', 'key_type'],
    })
    keys.add_default_values({
        'in_use': True,
        'create_date': '@@utcnow',
        'key_type': 'product'
    })

    '''
    api-key-rules:
        product_name        string, pk, fk->products
        rule_name           string, pk
        assignment_order    integer, required
        version_patterns    array of strings, required
        is_active           boolean, required, default=true
        rule_type           enum pass|redirect|reject, required, default=pass
        response_header     dict
        redirect:
            tenant_name     string
        reject:
            status_code     integer
            response_body   dict
    '''
    keyrules = ts.add_table('api-key-rules')
    keyrules.set_subfolder_name('api-router')
    keyrules.add_primary_key('product_name,rule_name')
    keyrules.add_foreign_key('product_name', 'products')
    keyrules.add_schema({
        'type': 'object',
        'properties': {
            'assignment_order': {'type': 'integer'},
            'version_patterns': {'type': 'array', 'items': {'type': 'string'}},
            'is_active': {'type': 'boolean'},
            'rule_type': {'enum': ['pass', 'redirect', 'reject']},
            'response_header': {'type': 'object'},
            'redirect': {
                'type': 'object',
                'properties': {
                    'tenant_name': {'type': 'string'},
                }
            },
            'reject': {
                'type': 'object',
                'properties': {
                    'status_code': {'type': 'integer'},
                    'response_body': {'type': 'object'},
                }
            },
        },
        'required': ['assignment_order', 'version_patterns', 'is_active', 'rule_type'],
    })
    keyrules.add_default_values({'is_active': True, 'rule_type': 'pass'})

    '''
    ue4-gameservers/
    ue4-gameservers-config (single row):
        build_archive_defaults
            region              string
            bucket_name         string
            ue4_builds_folder   string
    '''
    ue4_gameservers_config = ts.add_table('ue4-gameservers-config', single_row=True)
    ue4_gameservers_config.set_subfolder_name('ue4-gameservers')
    ue4_gameservers_config.add_schema({
        'type': 'object',
        'properties': {
            'build_archive_defaults': {
                'type': 'object',
                'properties': {
                    'region': {'type': 'string'},
                    'bucket_name': {'type': 'string'},
                    'ue4_builds_folder': {'type': 'string'},
                },
            },
        },
    })

    '''
    ue4-build-artifacts
        product_name        string, pk, fk->products, required
        s3_region           string
        bucket_name         string
        path                string
        command_line        string
    '''
    ue4_build_artifacts = ts.add_table('ue4-build-artifacts')
    ue4_build_artifacts.set_subfolder_name('ue4-gameservers')
    ue4_build_artifacts.add_primary_key('product_name')
    ue4_build_artifacts.add_foreign_key('product_name', 'products')
    ue4_build_artifacts.add_schema({
        'type': 'object',
        'properties': {
            's3_region': {'type': 'string'},
            'bucket_name': {'type': 'string'},
            'path': {'type': 'string'},
            'command_line': {'type': 'string'},
        },
        'required': ['s3_region', 'bucket_name', 'path', 'command_line'],
    })

    '''
    gameservers-machines:
        product_name        string, pk, fk->products, required
        group_name          string, pk
        region              string, pk
        platform            enum windows|linux, required
        autoscaling
            min             integer
            max             integer
            desired         integer
            instance_type   string, required
    '''
    gameservers_machines = ts.add_table('gameservers-machines')
    gameservers_machines.set_subfolder_name('ue4-gameservers')
    gameservers_machines.add_primary_key('product_name,group_name,region')
    gameservers_machines.add_foreign_key('product_name', 'products')
    gameservers_machines.add_schema({
        'type': 'object',
        'properties': {
            'region': {'type': 'string'},
            'platform': {'enum': ['windows', 'linux']},
            'autoscaling': {
                'type': 'object',
                'properties': {
                    'min': {'type': 'integer'},
                    'max': {'type': 'integer'},
                    'desired': {'type': 'integer'},
                    'instance_type': {'type': 'string'},
                }
            },
        },
    })

    '''
    gameservers-instances:
        gameserver_instance_id  string, pk, default=@@identity
        product_name            string, fk->products, required
        group_name              string, fk->gameservers-machines, required
        region                  string, fk->gameservers-machines, required
        tenant_name             string, fk->tenants, required
        ref                     string, required
        processes_per_machine   integer, required
        command_line            string
    '''
    gameservers_instances = ts.add_table('gameservers-instances')
    gameservers_instances.set_subfolder_name('ue4-gameservers')
    gameservers_instances.add_primary_key('gameserver_instance_id')
    gameservers_instances.add_foreign_key('product_name', 'products')
    #gameservers_instances.add_foreign_key('group_name,region', 'gameservers-machines')
    gameservers_instances.add_foreign_key('tenant_name', 'tenant-names')
    gameservers_instances.add_schema({
        'type': 'object',
        'properties': {
            'ref': {'type': 'string'},
            'processes_per_machine': {'type': 'integer'},
            'command_line': {'type': 'string'},
        },
        'required': [
            'product_name', 'group_name', 'region', 'tenant_name', 'ref',
            'processes_per_machine'
        ],
    })
    gameservers_instances.add_default_values(
        {'gameserver_instance_id': '@@identity'})

    '''
    metrics:
        tenant_name         string, pk, fk->tenant-names
    '''
    metrics = ts.add_table('metrics')
    metrics.add_primary_key('tenant_name')
    metrics.add_foreign_key('tenant_name', 'tenant-names')
    metrics.add_schema({
        'type': 'object',
        'properties': {
            's3_bucket': {'type': 'string'},
        },
        'required': ['s3_bucket'],
    })

    # END OF TABLE DEFS

    # Round-trip through the serialized definition so the returned store is
    # built the same way a store loaded from disk would be.
    definition = ts.get_definition()
    new_ts = TableStore()
    new_ts.init_from_definition(definition)
    return new_ts