def get_table_schema(cls):
    """Return a normalized schema dict ('type'/'columns'/'constraints'/'indexes')
    for this table class.

    Column and constraint definitions from the class are normalized through
    ``G(...)`` and then re-parsed by ``Table`` with the SQLite backend so the
    returned shapes match what storage introspection reports.
    """
    def build_column(column_name, definition):
        # deep-copy so stripping extras never mutates the class-level definition
        definition = deepcopy(definition)
        # exclude extras that are not known to storage
        for extra in cls.COLUMN_EXTRAS:
            definition.pop(extra, None)
        normalized = G('column', **definition)
        normalized['name'] = column_name
        return normalized

    def build_constraint(constraint_name, definition):
        normalized = G('constraint', **definition)
        normalized['name'] = constraint_name
        return normalized

    column_list = [
        build_column(key, value) for key, value in cls.columns.items()
    ]
    constraint_list = [
        build_constraint(key, value)
        for key, value in getattr(cls, 'constraints', {}).items()
    ]
    table = Table(
        cls.name,
        backend=get_parser(Backend.SQLITE),
        columns=column_list,
        constraints=constraint_list,
    )
    return {
        'type': 'table',
        'columns': table.columns,
        'constraints': table.constraints,
        'indexes': table.indexes,
    }
async def test_apply():
    """Integration test: Database.apply creates the declared schema, and the
    table read back via get_model matches the definition (per-backend quirks
    applied for postgres sequences and sqlite primary keys).
    """
    # 0. define constants
    # (removed unused locals: a timestamp column and a unique constraint were
    # defined here but never referenced by this test)
    table_definition = {
        "columns": {
            "id": G('column',
                    type='integer',
                    primary='test__id__pk',
                    sequence=True),
            "name": G('column', type='text', null=True)
        },
        "constraints": {
            "test__id__pk": G('constraint', type='primary', columns=['id'])
        }
    }
    schema = {"main": {"test": table_definition}}
    # setting scope to "main" ensures we do not try to remove
    # any of the special schemas in MySQL/Postgres (e.g. information_schema)
    # this is not necessary for SQLite
    scope = {"schemas": {"main": True}}
    # 1. setup test database
    # NOTE: renamed loop variable from `type` to `db_type` to avoid
    # shadowing the builtin; the `type=` keyword argument is unchanged.
    for db_type in ('postgres', 'sqlite'):
        async with setup_test_database("source", type=db_type,
                                       verbose=VERBOSE) as source:
            # 2. create test schematic elements using Database.apply
            await source.apply(schema, scope=scope)
            # 3. read the table back out and verify schema
            model = await source.get_model("test", schema='main')
            table = model.table
            assert table is not None
            assert table.pk == 'id'
            assert len(table.pks) == 1
            assert next(iter(table.pks)) == 'id'
            columns = table_definition['columns']
            if db_type == 'postgres':
                # postgres materializes sequence=True as a named sequence
                # plus a nextval() default on the column
                columns = deepcopy(columns)
                columns['id']['sequence'] = 'main.test__id__seq'
                columns['id']['default'] = {'nextval': "'main.test__id__seq'"}
            elif db_type == 'sqlite':
                # sqlite reports the pk column as primary=True rather than
                # via a named constraint
                columns = deepcopy(columns)
                columns['id']['primary'] = True
            assert table.columns == columns
async def test_workflow():
    """Integration test: WorkflowEngine runs a multi-step workflow
    (copy a table between databases, run an UPDATE joining against the
    copied data, then drop the scratch schema) and returns per-step results.
    """
    # 0. define constants
    user_definition = G(
        'table',
        columns={
            'id': G('column', type='integer', primary='user_id'),
            'name': G('column', type='text', null=True),
            'email': G('column', type='text')
        },
        constraints={
            'user_id': G('constraint', type='primary', columns=['id'])
        },
        indexes={
            'user_id': G('index',
                         primary=True,
                         unique=True,
                         type='btree',
                         columns=['id'])
        }
    )
    action_definition = G(
        'table',
        columns={
            'id': G('column', type='integer', primary='action_id'),
            'data': G('column', type='text'),
            'user_email': G('column', type='text'),
            'user_name': G('column', type='text', null=True)
        },
        constraints={
            'action_id': G('constraint', type='primary', columns=['id'])
        }
    )
    async with setup_test_database('main', verbose=VERBOSE) as main:
        async with setup_test_database('aux', verbose=VERBOSE) as aux:
            # backfill action.user_name from the copied users table
            # (joined case-insensitively on email)
            migrate = (
                'WITH users AS (select * from migrate.user) '
                "UPDATE action "
                "SET user_name = users.name "
                "FROM users "
                "WHERE upper(user_email) = upper(users.email)"
            )
            cleanup = "DROP SCHEMA migrate CASCADE"
            # workflow config: copy main.public.user -> aux.migrate.user,
            # then run the migrate query and the cleanup query on aux
            config = {
                'adbc': {'version': '0.0.1'},
                'databases': {
                    'main': main.url,
                    'aux': aux.url
                },
                'workflows': {
                    'migrate-actions': {
                        'steps': [{
                            'type': 'copy',
                            'source': 'main',
                            'target': 'aux',
                            'scope': {
                                'schemas': {
                                    'public': {
                                        'target': 'migrate',
                                        'tables': {
                                            'user': True
                                        }
                                    }
                                }
                            }
                        }, {
                            'type': 'query',
                            'source': 'aux',
                            'query': migrate
                        }, {
                            'type': 'query',
                            'source': 'aux',
                            'query': cleanup
                        }]
                    }
                },
            }
            # setup main database: users
            # setup aux database: actions
            await main.create_table('user', user_definition)
            user = await main.get_model('user')
            users = [{
                'id': 1,
                'name': 'Alf',
                'email': '*****@*****.**'
            }]
            await user.values(users).add()
            await aux.create_table('action', action_definition)
            action = await aux.get_model('action')
            actions = [{
                'id': 1,
                'data': 'logged in',
                'user_email': '*****@*****.**',
                'user_name': None
            }, {
                'id': 2,
                'data': 'logged in',
                'user_email': '*****@*****.**',
                'user_name': 'alf'
            }, {
                'id': 4,
                'data': 'logged in',
                'user_name': 'old',
                'user_email': '*****@*****.**'
            }]
            await action.values(actions).add()
            # run the workflow and validate the per-step results
            engine = WorkflowEngine(config)
            results = await engine.run('migrate-actions')
            copy_result = {
                "data_changes": {
                    "migrate": {
                        "user": {
                            "copied": 1,
                            "skipped": 0
                        }
                    }
                },
                "schema_changes": {
                    "+": {
                        "migrate": {
                            "user": user_definition
                        }
                    }
                },
                "final_diff": {}
            }
            migrate_result = 'UPDATE 2'
            cleanup_result = 'DROP SCHEMA'
            # duration is nondeterministic; drop it before comparing
            results[0].pop('duration')
            assert results == [
                copy_result, migrate_result, cleanup_result
            ]
            # the migrate query should have backfilled user_name for id=1
            name = await action.key(1).field('user_name').get()
            assert name == 'Alf'
def init_table(self):
    """Normal table initializer"""
    # in one loop, create a two-way values binding for these properties:
    # - sequence: based on auto-increment (MySQL, SQLite) or nextval (Postgres)
    # - primary: based on primary constraint or index
    # - unique: based on unique constraints
    # - related: based on foreign key constraints
    constraints = self.constraints
    # derive column -> constraint-name maps from the constraint definitions
    self.pks = get_pks(constraints) or {}
    uniques = self.uniques = get_uniques(constraints)
    fks = self.fks = get_fks(constraints)
    for name, column in self.columns.items():
        if "default" in column:
            # parse the raw default into a structured expression
            default = column['default'] = self.backend.parse_expression(
                column['default'])
            if isinstance(default, dict) and 'nextval' in default:
                # TODO: move nextval into backend, non-standard SQL
                if not self.sequences_enabled:
                    column['default'] = None
                else:
                    # nextval argument is quoted; strip the surrounding
                    # quote characters to get the sequence name
                    column["sequence"] = column.get(
                        "sequence", default['nextval'][1:-1])
            else:
                column["sequence"] = column.get(
                    "sequence", False) if self.sequences_enabled else False
        if column.get("primary"):
            # column declares primary -> ensure a matching constraint exists
            if name not in self.pks:
                primary = column.get("primary")
                # primary may be a constraint name (str) or just truthy
                constraint_name = (primary if isinstance(primary, str)
                                   else f"{self.name}__{name}__pk")
                self.pks[name] = constraint_name
                constraints[constraint_name] = G('constraint',
                                                 type=PRIMARY,
                                                 columns=[name])
        else:
            # no declaration -> bind primary from the constraints side
            column["primary"] = self.pks.get(name, False)
        if not column.get('choices'):
            # TODO: support getting choices from passed-in scope
            # rather than column definition
            column['choices'] = None
        if column.get("unique"):
            # column declares unique -> ensure a matching constraint exists
            if name not in uniques:
                unique = column.get("unique")
                # unique may be a constraint name (str) or just truthy
                constraint_name = (unique if isinstance(unique, str)
                                   else f"{self.name}__{name}__uk")
                uniques[name] = constraint_name
                constraints[constraint_name] = G('constraint',
                                                 type=UNIQUE,
                                                 columns=[name])
        else:
            # no declaration -> bind unique from the constraints side
            column["unique"] = uniques.get(name, False)
        related = column.get('related')
        if related:
            # column declares a relation -> ensure a foreign-key constraint
            if name not in fks:
                constraint_name = (related if isinstance(related, str)
                                   else f"{self.name}__{name}__fk")
                by = related['by']
                to = related['to']
                if not isinstance(by, list):
                    by = [by]
                fks[name] = {'to': to, 'by': by, 'name': constraint_name}
                constraints[constraint_name] = {
                    "type": FOREIGN,
                    "deferred": False,
                    "deferrable": False,
                    "columns": [name],
                    "related_name": to,
                    "related_columns": by
                }
        else:
            # no declaration -> bind related from the foreign-key side
            column['related'] = fks.get(name, None)
    if not self.pks:
        # no primary constraint anywhere: treat every column as part of
        # the (implicit) key
        self.pks = {name: True for name in self.column_names}
    # pk is the single-column shortcut; None when the key is composite
    if len(self.pks) == 1:
        self.pk = next(iter(self.pks))
    else:
        self.pk = None
async def test_diff():
    """Integration test: Database.diff compares two databases through an
    aliased scope (schema/table/column renames) and reports row, column,
    constraint and index differences as [source, target] pairs.
    """
    # 0. define constants
    timestamp_column = G("column", type="timestamp with time zone", null=True)
    unique_constraint = G("constraint", type="unique", columns=["name"])
    source_definition = G(
        "table",
        columns={
            "id": G("column", type="integer"),
            "name": G("column", type="text", null=True),
            "bleh": G("column", type="text", null=True)
        },
        constraints={
            "test_id": G("constraint", type="primary", columns=["id"])
        },
    )
    target_definition = G("table",
                          columns={
                              "id": G("column", type="integer"),
                              "name": G("column", type="text", null=True),
                              "meh": G("column", type="text", null=True)
                          },
                          constraints={
                              "test_id": G("constraint",
                                           type="primary",
                                           columns=["id"])
                          })
    copy_definition = G("table",
                        columns={
                            "id": G("column", type="integer"),
                            "name": G("column", type="text", null=True),
                        },
                        constraints={
                            "copy_id": G("constraint",
                                         type="primary",
                                         columns=["id"])
                        })
    unique_index = G("index", columns=["name"], type="btree", unique=True)
    scope = {
        "schemas": {
            "main": {  # alias schema name: main
                "source": "public",  # actually refers to "public" schema in source
                "target": "testing",  # actually refers to "testing" schema in target
                "tables": {
                    "*": True,
                    "test": {  # alias table name: test
                        "target": "test2",  # actually refers to "test2" in target
                        "columns": {
                            "*": True,
                            "log": {  # alias column name: log
                                "source": "bleh",  # actually refers to "meh" in target
                                "target": "meh"  # actually refers to "bleh" in source
                            }
                        }
                    }
                }
            }
        }
    }
    # 1. setup test databases
    async with setup_test_database("source", verbose=VERBOSE) as source:
        async with setup_test_database("target", verbose=VERBOSE) as target:
            # 2. create test schematic elements: table in both source and target
            # the table called "copy" will remain the same on both
            # source/target after initial setup
            # the table called "test" will change on both source/target
            # throughout the test
            await source.create_table("test", source_definition)
            await source.create_table("copy", copy_definition)
            await target.create_schema("testing")
            await target.create_table("test2",
                                      target_definition,
                                      schema="testing")
            await target.create_table("copy",
                                      copy_definition,
                                      schema="testing")
            # 3. add data
            source_model = await source.get_model("test")
            source_copy = await source.get_model("copy")
            source_table = source_model.table
            target_model = await target.get_model("test2", schema='testing')
            target_copy = await target.get_model("copy", schema='testing')
            target_table = target_model.table
            # add (INSERT)
            # `field` is the extra per-side column ('bleh' on source,
            # 'meh' on target, none on the "copy" tables)
            for model, field in ((source_model, 'bleh'), (source_copy, None),
                                 (target_model, 'meh'), (target_copy, None)):
                values = {'id': 1, 'name': 'Jay'}
                if field:
                    values[field] = 'test'
                await model.values(values).take("*").add()
                values['id'] = 2
                values['name'] = 'Quinn'
                await model.values(values).add()
                values['id'] = 3
                values['name'] = 'Hu'
                await model.values(values).add()
            # 4. check diff -> expect none
            diff = await source.diff(target, scope=scope, hashes=True)
            assert diff == {}
            # 5. make changes in source
            await source.create_column("test", "created", timestamp_column)
            await source.create_constraint("test", "name_unique",
                                           unique_constraint)
            # 6. make changes in target
            await target_model.values([{
                "id": 10,
                "name": "Jim"
            }, {
                "id": 9,
                "name": "Jane"
            }]).add()
            await target.create_column("test2",
                                       "updated",
                                       timestamp_column,
                                       schema="testing")
            await target.alter_column("test2",
                                      "name",
                                      patch={"null": False},
                                      schema="testing")
            # 7. diff and validate changes
            # reset caches
            target.reset()
            source.reset()
            diff = await source.diff(target, scope=scope, hashes=True)
            # each leaf pair is [source value, target value]
            assert diff == {
                "main": {
                    "test": {
                        "rows": {
                            "range": {
                                "id": {
                                    "max": [3, 10]
                                }
                            },
                            "count": [3, 5],
                            "hashes": {
                                1: [
                                    'ccbc0c558e02b662d1d413571332de21',
                                    '9e6a96b10278b39d6988001d11a936fb'
                                ]
                            }
                        },
                        "columns": {
                            insert: {
                                "updated": timestamp_column
                            },
                            delete: {
                                "created": timestamp_column
                            },
                            "name": {
                                "null": [True, False],
                                "unique": ['name_unique', False]
                            },
                        },
                        "constraints": {
                            delete: {
                                "name_unique": unique_constraint
                            }
                        },
                        "indexes": {
                            delete: {
                                "name_unique": unique_index
                            }
                        },
                    }
                }
            }
async def test_copy():
    """Integration test: Database.copy replicates scoped tables (with schema,
    table and column renames) into a target database, skips out-of-scope
    tables, and incrementally syncs subsequent schema/data changes.
    """
    # 0. define constants
    timestamp_column = G('column', type='timestamp with time zone', null=True)
    test_size = 1000
    copy_size = 1000
    other_size = 100
    table_definition = G('table',
                         columns={
                             'id': G('column',
                                     type='integer',
                                     primary='test_id'),
                             'slug': G('column', type='text', null=True),
                             'name': G('column', type='text')
                         },
                         constraints={
                             'test_id': G('constraint',
                                          type='primary',
                                          columns=['id'])
                         },
                         indexes={
                             'test_id': G('index',
                                          type='btree',
                                          primary=True,
                                          unique=True,
                                          columns=['id'])
                         })
    # same as table_definition except "name" is renamed to "anombre";
    # this is what the copied table should look like on the target
    target_definition = G('table',
                          columns={
                              'id': G('column',
                                      type='integer',
                                      primary='test_id'),
                              'slug': G('column', type='text', null=True),
                              'anombre': G('column', type='text')
                          },
                          constraints={
                              'test_id': G('constraint',
                                           type='primary',
                                           columns=['id'])
                          },
                          indexes={
                              'test_id': G('index',
                                           type='btree',
                                           primary=True,
                                           unique=True,
                                           columns=['id'])
                          })
    copy_definition = G('table',
                        columns={
                            'id': G('column',
                                    type='integer',
                                    primary='copy_id'),
                            'name': G('column', type='text')
                        },
                        constraints={
                            'copy_id': G('constraint',
                                         type='primary',
                                         columns=['id'])
                        },
                        indexes={
                            'copy_id': G('index',
                                         type='btree',
                                         primary=True,
                                         unique=True,
                                         columns=['id'])
                        })
    other_definition = G('table',
                         columns={
                             'id': G('column',
                                     type='integer',
                                     primary='other_id'),
                             'name': G('column', type='text')
                         },
                         constraints={
                             'other_id': G('constraint',
                                           type='primary',
                                           columns=['id'])
                         },
                         indexes={
                             'other_id': G('index',
                                           type='btree',
                                           primary=True,
                                           unique=True,
                                           columns=['id'])
                         })
    unique_constraint = G('constraint', type='unique', columns=['slug'])
    unique_index = G('index', type='btree', unique=True, columns=['slug'])
    # scope includes "copy" and "test" but not "other"
    scope = {
        "schemas": {
            "main": {
                "source": "public",
                "target": "testing",
                "tables": {
                    "copy": True,
                    "test": {
                        "target": "prueba",  # renamed from "test" to "prueba" in "target"
                        "columns": {
                            "*": True,
                            "name": {
                                "target": "anombre"  # renamed from "name" to "anombre"
                            }
                        }
                    }
                }
            }
        }
    }
    # 1. setup test databases
    async with setup_test_database("source", verbose=VERBOSE) as source:
        async with setup_test_database("target", verbose=VERBOSE) as target:
            # 2. create test schematic elements: table in source and target
            # the table called "copy" will remain the same on both
            # source/target after initial setup
            # the table called "test" will change on source during the test
            await source.create_table("test", table_definition)
            await source.create_table("copy", copy_definition)
            await source.create_table("other", other_definition)
            # 3. add data
            source_model = await source.get_model("test")
            source_copy = await source.get_model('copy')
            source_other = await source.get_model('other')
            source_table = source_model.table
            # add (INSERT)
            for model, size in ((source_model, test_size),
                                (source_copy, copy_size),
                                (source_other, other_size)):
                # PreQL to generate series (cannot use model interface to do this)
                query = {
                    'insert': {
                        'table': model.table.full_name,
                        'columns': ['id', 'name'],
                        'values': {
                            'select': {
                                'data': ['S', {
                                    'name': {
                                        'concat': ['`name`', 'S']
                                    }
                                }],
                                'from': {
                                    'S': {
                                        'generate_series': [1, size]
                                    }
                                }
                            }
                        }
                    }
                }
                await source.execute(query)
            # create a target entry in a different schema
            await target.create_schema('other')
            await target.create_table("test", target_definition,
                                      schema='other')
            other_model = await target.get_model('test', schema='other')
            await other_model.values({'id': 1, 'anombre': 'other-1'}).add()
            # 4. run copy to populate target
            data = await source.copy(target, scope=scope)
            assert data == {
                'schema_changes': {
                    insert: {
                        'testing': {
                            'prueba': target_definition,
                            'copy': copy_definition
                        }
                    }
                },
                'data_changes': {
                    'testing': {
                        'copy': {
                            'copied': copy_size,
                            'skipped': 0
                        },
                        'prueba': {
                            'copied': test_size,
                            'skipped': 0
                        }
                    }
                },
                'final_diff': {}
            }
            # check to make sure "test" was moved to "prueba"
            target_model = await target.get_model('prueba', schema='testing')
            assert target_model is not None
            target_count = await target_model.count()
            assert target_count == test_size
            # check to make sure "other" was not moved
            target_other = None
            try:
                target_other = await target.get_model('other',
                                                      schema='testing')
            except Exception:
                # expected: out-of-scope table must not exist on the target
                pass
            assert target_other is None
            # 5. make schema and data changes in source
            await source.create_column("test", "created", timestamp_column)
            await source.create_constraint("test", "slug_unique",
                                           unique_constraint)
            await source.alter_column('test', 'name', patch={'null': True})
            await source_model.key(1).values({'name': 'changed-1'}).set()
            await source_model.key(3).delete()
            await source_model.values({
                'id': test_size + 1,
                'name': 'new'
            }).add()
            await source.create_column('test', 'updated', timestamp_column)
            # reset both databases to re-trigger schema queries
            # before next copy
            source.reset()
            # 6. run copy again
            data = await source.copy(target, scope=scope)
            data_changes = data.pop('data_changes')
            # unchanged rows in "copy" are detected and skipped
            assert data_changes['testing']['copy'] == {
                'copied': 0,
                'skipped': copy_size
            }
            assert len(data_changes.keys()) == 1
            # TODO: validate proper renaming of columns
            # referenced inside of constraints and indexes...
            assert data == {
                'final_diff': {},
                'schema_changes': {
                    'testing': {
                        'prueba': {
                            'columns': {
                                insert: {
                                    'created': timestamp_column,
                                    'updated': timestamp_column
                                },
                                'anombre': {
                                    'null': [False, True]
                                }
                            },
                            'constraints': {
                                insert: {
                                    'slug_unique': unique_constraint
                                }
                            },
                            'indexes': {
                                insert: {
                                    'slug_unique': unique_index
                                }
                            }
                        }
                    }
                },
            }
            # make sure our other model in a different schema is unchanged
            other_count = await other_model.count()
            assert other_count == 1
            record = await other_model.key(1).get()
            assert dict(record) == {
                'id': 1,
                'anombre': 'other-1',
                'slug': None
            }
async def test_info():
    """Integration test: Database.get_info reports schema, row statistics,
    hashes, and sequence values for postgres and sqlite, including derived
    foreign-key constraints and column-exclusion support.
    """
    # 0. define constants
    timestamp_column = G('column', type='timestamp with time zone', null=True)
    unique_name = G('constraint', type='unique', columns=['name'])
    related_definition = {
        "columns": {
            "id": G('column', type='integer', primary=True, sequence=True)
        }
    }
    # trailing underscore: per-backend mutable copies are made in the loop
    table_definition_ = {
        "columns": {
            "id": G('column',
                    type='integer',
                    primary='test__id__pk',
                    sequence=True),
            "related_id": G('column',
                            type='integer',
                            null=True,
                            related={
                                'to': 'main.related',
                                'by': ['id']
                            }),
            "name": G('column', type='text', null=True)
        },
        "constraints": {
            "test__id__pk": G('constraint', type='primary', columns=['id'])
            # FK for "related_id" intentionally omitted
        }
    }
    scope = {"schemas": {"main": True}}
    alias_scope = {"schemas": {"main": {"source": "main"}}}
    # 1. setup test database
    for type in ('postgres', 'sqlite'):
        async with setup_test_database("source", type=type,
                                       verbose=VERBOSE) as source:
            table_definition = deepcopy(table_definition_)
            # 2. create test schematic elements: schema and table
            if type != 'sqlite':
                # sqlite does not support create schema and has a default
                # schema "main"
                await source.create_schema("main")
            await source.create_table("related",
                                      related_definition,
                                      schema="main")
            await source.create_table("test", table_definition, schema="main")
            # 3. get/add/edit/delete data
            model = await source.get_model("test", schema='main')
            table = model.table
            assert table is not None
            assert table.pk == 'id'
            # the FK constraint is auto-created from the column's "related"
            # spec and its generated name is reflected back into the column
            table_definition['columns']['related_id']['related'][
                'name'] = 'test__related_id__fk'
            if type == 'postgres':
                # postgres materializes sequence=True as a named sequence
                # plus a nextval() default
                table_definition['columns']['id'][
                    'sequence'] = 'main.test__id__seq'
                table_definition['columns']['id']['default'] = {
                    'nextval': "'main.test__id__seq'"
                }
            if type == 'sqlite':
                # sqlite reports primary=True and unqualified related names
                table_definition['columns']['id']['primary'] = True
                table_definition['columns']['related_id']['related'][
                    'to'] = 'related'
            assert table.columns == table_definition['columns']
            # add (INSERT)
            await model.values({"name": "Jay"}).add()
            await model.values({"name": "Quinn"}).add()
            await model.add()
            # count/get (SELECT)
            # simple count
            count = await model.count()
            assert count == 3
            # query count
            query = model.where({
                "or": [{
                    "icontains": ["name", "'ja'"]
                }, {
                    "in": ['id', [3, 999]]
                }]
            })
            count = await query.count()
            assert count == 2
            results = await query.sort("id").get()
            assert len(results) == 2
            assert dict(results[0]) == {
                "id": 1,
                "name": "Jay",
                'related_id': None
            }
            assert dict(results[1]) == {
                "id": 3,
                "name": None,
                'related_id': None
            }
            json_results = await query.sort('id').get(json=True)
            # cast to normal dict to ensure json.dumps can handle
            assert json.dumps(json.loads(json_results)) == json.dumps(
                [dict(r) for r in results])
            json_result = await model.key(3).one(json=True)
            assert json.loads(json_result) == {
                'id': 3,
                'name': None,
                'related_id': None
            }
            # UPDATE
            # TODO: use where(id=3) to test where in set
            updated = await model.key(3).values({"name": "Ash"}).set()
            assert updated == 1
            # DELETE
            deleted = await model.key(2).delete()
            assert deleted == 1
            # 5. get database statistics
            info = await source.get_info(scope=scope)
            expect_schema = table_definition
            actual_schema = info["main"]["test"]
            # get_info reports the FK derived from the "related" column spec
            expect_schema['constraints']['test__related_id__fk'] = G(
                'constraint',
                type='foreign',
                columns=['related_id'],
                related_name='main.related' if type != 'sqlite' else 'related',
                related_columns=['id'])
            if type == 'postgres':
                # 3 rows inserted -> sequence last value is 3
                seq = info['main']['test__id__seq']
                assert seq == {'value': 3, 'type': 'sequence'}
            actual_data = actual_schema['rows']
            assert expect_schema["columns"] == actual_schema["columns"]
            assert expect_schema["constraints"] == actual_schema["constraints"]
            assert actual_data["count"] == 2
            assert actual_data["range"] == {"id": {"min": 1, "max": 3}}
            # 6. add new schema elements
            table_definition["columns"]["created"] = timestamp_column
            await source.create_column("test",
                                       "created",
                                       timestamp_column,
                                       schema="main")
            if type != 'sqlite':
                table_definition["constraints"]["unique_name"] = unique_name
                await source.create_constraint("test",
                                               "unique_name",
                                               unique_name,
                                               schema="main")
            # 7. add new data
            await model.values([{
                "id": 6,
                "name": "Jim"
            }, {
                "id": 5,
                "name": "Jane"
            }]).add()
            # 8. get database statistics again with an aliased scope
            # alias scope supports translation during diff/copy
            info = await source.get_info(scope=alias_scope, hashes=True)
            actual_schema = info["main"]["test"]
            actual_data = actual_schema["rows"]
            if type != 'sqlite':
                # expect unique to be set
                expect_schema['columns']['name']['unique'] = 'unique_name'
            assert expect_schema["columns"] == actual_schema["columns"]
            assert expect_schema["constraints"] == actual_schema["constraints"]
            assert actual_data["count"] == 4
            assert actual_data["range"] == {"id": {"min": 1, "max": 6}}
            assert actual_data['hashes'] == {
                1: '79bf96eadff2c10ee9c81356d53ef51e'
            }
            # 9. test exclusion: ignore certain fields
            excludes = ['unique', 'primary', 'related', 'type']
            info = await source.get_info(scope=alias_scope,
                                         hashes=True,
                                         exclude={'columns': excludes})
            actual_schema = info['main']['test']
            expected = deepcopy(expect_schema)
            for name, column in expected['columns'].items():
                for exclude in excludes:
                    # NOTE(review): pop() without a default assumes every
                    # excluded key is present on every column — verify
                    column.pop(exclude)
            assert expected['columns'] == actual_schema['columns']