def test_model_and_migration_schemas_are_the_same(uri_left, uri_right,
                                                  alembic_config_left):
    """Compare two databases.

    Compares the database obtained with all migrations against the
    one we get out of the models.
    """
    prepare_schema_from_migrations(uri_left, alembic_config_left)
    prepare_schema_from_models(uri_right, Base)

    # Set literal is the idiomatic spelling of set(['alembic_version']);
    # the version-tracking table only exists on the migrations side.
    result = compare(uri_left, uri_right, {'alembic_version'})

    assert result.is_match
def test_model_and_migration_schemas_are_the_same(self, uri_left, uri_right,
                                                  alembic_config_left):
    """Compare two databases.

    Compares the database obtained with all migrations against the
    one we get out of the models.
    """
    setup_extensions(uri_left)
    prepare_schema_from_migrations(uri_left, alembic_config_left)

    # Importing the model modules registers their tables on Base.metadata
    # as a side effect; this must happen before prepare_schema_from_models().
    from ingredients_db.models.images import Image, ImageMembers
    from ingredients_db.models.instance import Instance, InstanceKeypair
    from ingredients_db.models.network import Network
    from ingredients_db.models.network_port import NetworkPort
    from ingredients_db.models.project import Project, ProjectMembers
    from ingredients_db.models.keypair import Keypair
    from ingredients_db.models.task import Task
    from ingredients_db.models.authn import AuthNUser, AuthNServiceAccount
    from ingredients_db.models.authz import (AuthZPolicy, AuthZRole,
                                             AuthZRolePolicy)
    from ingredients_db.models.region import Region
    from ingredients_db.models.zones import Zone
    from ingredients_db.models.builtin import BuiltInUser

    # Make sure the imports don't go away: reference every class once.
    # (Replaces the previous eighteen no-op .mro() calls.)
    _imported_models = (
        Image, ImageMembers, Instance, InstanceKeypair, Network, NetworkPort,
        Project, ProjectMembers, Keypair, Task, AuthNUser,
        AuthNServiceAccount, AuthZPolicy, AuthZRole, AuthZRolePolicy,
        Region, Zone, BuiltInUser,
    )
    del _imported_models

    setup_extensions(uri_right)
    prepare_schema_from_models(uri_right, Base)

    result = compare(uri_left, uri_right, {'alembic_version'})

    # Dump any differences to help debug a mismatch.
    print(json.dumps(result.errors, indent=4))
    assert result.is_match
def test_model_and_migration_schemas_are_the_same(self):
    """Compare two databases.

    Compares the database obtained with all migrations against the
    one we get out of the models. It produces a text file with the
    results to help debug differences.
    """
    prepare_schema_from_migrations(self.uri_left, self.alembic_config_left)
    prepare_schema_from_models(self.uri_right, Base)

    # Set literal is the idiomatic spelling of set(['alembic_version']).
    result = compare(self.uri_left, self.uri_right, {'alembic_version'})

    assert result.is_match
def test_same_schema_is_the_same(uri_left, uri_right, alembic_config_left,
                                 alembic_config_right):
    """Compare two databases both from migrations.

    Makes sure the schema comparer validates a database to an exact
    replica of itself.
    """
    prepare_schema_from_migrations(uri_left, alembic_config_left)
    prepare_schema_from_migrations(uri_right, alembic_config_right)

    # Set literal is the idiomatic spelling of set(['alembic_version']).
    result = compare(uri_left, uri_right, {'alembic_version'})

    # uncomment to see the dump of info dict
    # result.dump_info()

    assert result.is_match
def test_model_and_migration_schemas_are_the_same(uri_left, uri_right,
                                                  alembic_config_left):
    """Compares the database schema obtained with all migrations against the
    one we get out of the models.
    """
    ah.load_premigration_sql(uri_left)
    prepare_schema_from_migrations(uri_left, alembic_config_left)
    prepare_schema_from_models(uri_right, Base)

    result = compare(uri_left, uri_right, ignores=['alembic_version'])

    # Plain truthiness test: `assert x is True` is redundant and
    # non-idiomatic for a boolean attribute.
    assert result.is_match, \
        'Differences (left is migrations, right is models):\n' \
        '%s' % json.dumps(
            result.errors, sort_keys=True, indent=4, separators=(',', ': ')
        )
def test_model_and_migration_schemas_are_the_same(self):
    """Compare two databases.

    Compares the database obtained with all migrations against the
    one we get out of the models. It produces a text file with the
    results to help debug differences.
    """
    import json

    prepare_schema_from_migrations(self.uri_left, self.alembic_config_left)
    prepare_schema_from_models(self.uri_right, Base)

    # Known, accepted model/migration differences are ignored
    # (set literal replaces the old set([...]) spelling).
    result = compare(self.uri_left, self.uri_right, ignores={
        'alembic_version',
        'menstrual_cup_fill.col.removal_time',
        'menstrual_cup_fill.col.time',
    })

    # indent=1 replaces indent=True: json.dumps expects an int here, and
    # True only produced the same output because bool subclasses int.
    assert result.is_match, json.dumps(result.errors, indent=1)
def test_model_and_migration_schemas_are_the_same(
        uri_left, uri_right, alembic_config_left):
    """Compare two databases.

    Compares the database obtained with all migrations against the one
    we get out of the models.
    """
    prepare_schema_from_migrations(uri_left, alembic_config_left)

    # The models database needs PostGIS before its tables can be created.
    models_engine = create_engine(uri_right)
    models_engine.execute('create extension postgis')
    prepare_schema_from_models(uri_right, Base)

    # Ignore grids.srid fkey because of the flaky way it has to be set up;
    # for details see comments in the definition of `Grid` in `v2.py`.
    ignored_items = {'alembic_version', 'grids.fk.grids_srid_fkey'}
    result = compare(uri_left, uri_right, ignores=ignored_items)

    assert result.is_match
def test_prepare_schema_for_migrations_default_revision_value(
        Config_mock, create_engine_mock, script_directory_mock, command_mock):
    """prepare_schema_from_migrations() upgrades to "head" by default.

    Verifies, via mocks, that the helper builds the engine and script
    directory from the config, returns both, and runs the upgrade once.
    """
    uri = "Migrations URI"
    config = Config_mock.return_value

    engine, script = prepare_schema_from_migrations(uri, config)

    # The returned objects are exactly what the factories produced.
    assert engine == create_engine_mock.return_value
    assert script == script_directory_mock.from_config.return_value

    # Each collaborator was invoked exactly once, with the right arguments.
    create_engine_mock.assert_called_once_with(uri)
    script_directory_mock.from_config.assert_called_once_with(config)
    command_mock.upgrade.assert_called_once_with(config, "head")
def test_model_and_migration_schemas_are_the_same(self):
    """
    Compare two databases.

    Compares the database obtained with all migrations against the one we
    get out of the models. It produces a text file with the results to help
    debug differences.
    """
    prepare_schema_from_migrations(self.uri_left, self.alembic_config_left)

    # The models schema needs pgcrypto before its tables can be created.
    engine = create_engine(self.uri_right)
    engine.execute("CREATE EXTENSION pgcrypto;")
    prepare_schema_from_models(self.uri_right, db)

    # Set literal is the idiomatic spelling of set(['alembic_version']).
    result = compare(self.uri_left, self.uri_right, {'alembic_version'})

    if not result.is_match:
        print("###### DB MISMATCH:")
        pprint.PrettyPrinter(indent=1).pprint(result.errors)
    assert result.is_match
def test_upgrade_and_downgrade(self, uri_left, alembic_config_left):
    """Test all migrations up and down.

    Tests that we can apply all migrations from a brand new empty
    database, and also that we can remove them all.
    """
    setup_extensions(uri_left)
    db_engine, migration_script = prepare_schema_from_migrations(
        uri_left, alembic_config_left)

    head_rev = get_head_revision(
        alembic_config_left, db_engine, migration_script)
    current_rev = get_current_revision(
        alembic_config_left, db_engine, migration_script)
    # After applying every migration we must sit at the head revision.
    assert head_rev == current_rev

    # Walk back down one revision at a time until none remain.
    while current_rev is not None:
        command.downgrade(alembic_config_left, '-1')
        current_rev = get_current_revision(
            alembic_config_left, db_engine, migration_script)
def test_upgrade_and_downgrade(uri_left, alembic_config_left):
    """Test all migrations up and down.

    Tests that we can apply all migrations from a brand new empty
    database, and also that we can remove them all.
    """
    db_engine, migration_script = prepare_schema_from_migrations(
        uri_left, alembic_config_left)

    head_rev = get_head_revision(
        alembic_config_left, db_engine, migration_script)
    current_rev = get_current_revision(
        alembic_config_left, db_engine, migration_script)
    # After applying every migration we must sit at the head revision.
    assert head_rev == current_rev

    # Walk back down one revision at a time until none remain.
    while current_rev is not None:
        command.downgrade(alembic_config_left, '-1')
        current_rev = get_current_revision(
            alembic_config_left, db_engine, migration_script)
def test_model_and_migration_schemas_are_not_the_same(uri_left, uri_right,
                                                      alembic_config_left):
    """Compares the database obtained with the first migration against the
    one we get out of the models.

    The two schemas are expected to differ; the exact expected differences
    are spelled out in `errors` below.
    """
    # Apply only the first migration on the left side.
    prepare_schema_from_migrations(uri_left, alembic_config_left,
                                   revision="+1")
    prepare_schema_from_models(uri_right, Base)

    # Set literal is the idiomatic spelling of set(['alembic_version']).
    result = compare(uri_left, uri_right, {'alembic_version'})

    # uncomment to see the dump of errors dict
    # result.dump_errors()

    # Expected differences: left is the first-migration schema, right is
    # the full models schema.
    errors = {
        'tables': {
            'left_only': ['addresses'],
            'right_only': ['roles']
        },
        'tables_data': {
            'employees': {
                'columns': {
                    'left_only': [{
                        'default': None,
                        'name': 'favourite_meal',
                        'nullable': False,
                        'type': "ENUM('meat','vegan','vegetarian')"
                    }],
                    'right_only': [
                        {
                            'autoincrement': False,
                            'default': None,
                            'name': 'role_id',
                            'nullable': False,
                            'type': 'INTEGER(11)'
                        },
                        {
                            'autoincrement': False,
                            'default': None,
                            'name': 'number_of_pets',
                            'nullable': False,
                            'type': 'INTEGER(11)'
                        },
                    ]
                },
                'foreign_keys': {
                    'right_only': [{
                        'constrained_columns': ['role_id'],
                        'name': 'fk_employees_roles',
                        'options': {},
                        'referred_columns': ['id'],
                        'referred_schema': None,
                        'referred_table': 'roles'
                    }]
                },
                'indexes': {
                    'left_only': [{
                        'column_names': ['name'],
                        'name': 'name',
                        'type': 'UNIQUE',
                        'unique': True
                    }],
                    'right_only': [{
                        'column_names': ['role_id'],
                        'name': 'fk_employees_roles',
                        'unique': False
                    }, {
                        'column_names': ['name'],
                        'name': 'ix_employees_name',
                        'type': 'UNIQUE',
                        'unique': True
                    }]
                }
            },
            'phone_numbers': {
                'columns': {
                    'diff': [{
                        'key': 'number',
                        'left': {
                            'default': None,
                            'name': 'number',
                            'nullable': True,
                            'type': 'VARCHAR(40)'
                        },
                        'right': {
                            'default': None,
                            'name': 'number',
                            'nullable': False,
                            'type': 'VARCHAR(40)'
                        }
                    }]
                }
            }
        },
        'uris': {
            'left': uri_left,
            'right': uri_right,
        }
    }

    compare_error_dicts(errors, result.errors)
def test_12f290b63791_upgrade_data_migration(uri_left, alembic_config_left):
    """
    Test the data migration from 614911daf883 to 12f290b63791.

    Note: Cannot use relative revisions because they will become invalid
    when later revisions are added.

    TODO: It would be better to pull the revision numbers out of the
    migration script (.revision, .down_revision), but they are not (yet)
    part of the modelmeta package.
    """
    # Set up database in pre-migration schema
    engine, script = prepare_schema_from_migrations(
        uri_left, alembic_config_left, revision='614911daf883')

    # Define minimal set of tables needed to test migration.
    # Tables are reflected from the live pre-migration schema
    # (autoload=True) rather than declared via the current ORM.
    meta_data = MetaData(bind=engine)
    variable_aliases = Table('variable_aliases', meta_data, autoload=True)
    grids = Table('grids', meta_data, autoload=True)
    data_files = Table('data_files', meta_data, autoload=True)
    data_file_variables = Table('data_file_variables', meta_data,
                                autoload=True)

    # Insert minimal data needed to test migration: Several instances of
    # each of variable_aliases, grids, data_files, associated to a
    # data_file_variables. Record i in each table is linked to record i in
    # every other table (all ids equal i), which is what the post-migration
    # assertions below rely on.
    num_test_records = 3
    for i in range(0, num_test_records):
        for stmt in [
            variable_aliases.insert().values(
                variable_alias_id=i,
                variable_long_name=name('var', i),
                variable_standard_name=name('var', i),
                variable_units='foo',
            ),
            grids.insert().values(
                grid_id=i,
                xc_origin=0.0,
                xc_grid_step=1.0,
                xc_count=10,
                xc_units='xc_units',
                yc_origin=0.0,
                yc_grid_step=1.0,
                yc_count=10,
                yc_units='yc_units',
                evenly_spaced_y=True,
            ),
            data_files.insert().values(
                data_file_id=i,
                filename=name('filename', i),
                first_1mib_md5sum='first_1mib_md5sum',
                unique_id=name('unique_id', i),
                x_dim_name='x_dim_name',
                y_dim_name='y_dim_name',
                index_time=datetime.datetime.now(),
            ),
            data_file_variables.insert().values(
                data_file_variable_id=i,
                data_file_id=i,
                variable_alias_id=i,
                grid_id=i,
                netcdf_variable_name=name('var', i),
                range_min=0.0,
                range_max=10.0,
            )
        ]:
            engine.execute(stmt)

    # Run upgrade migration
    command.upgrade(alembic_config_left, '+1')

    # Check data results of migration. We can use current ORM for this.
    Base.metadata.create_all(engine)
    Session = sessionmaker(bind=engine)
    sesh = Session()

    # All inserted rows survive the migration and are typed 'gridded'.
    dfvs = sesh.query(DataFileVariable).all()
    assert len(dfvs) == num_test_records
    assert all(dfv.geometry_type == 'gridded' for dfv in dfvs)

    # The gridded subtype keeps the original 1:1 id associations
    # (record i points to grid i, variable alias i, and data file i).
    dfvs_gridded = sesh.query(DataFileVariableGridded).all()
    assert len(dfvs_gridded) == num_test_records
    assert all(dfv.grid_id == dfv.id for dfv in dfvs_gridded)
    assert all(dfv.variable_alias_id == dfv.id for dfv in dfvs_gridded)
    assert all(dfv.data_file_id == dfv.id for dfv in dfvs_gridded)

    sesh.close()
def test_12f290b63791_downgrade_data_migration(uri_left, alembic_config_left):
    """
    Test the data migration from 12f290b63791 to 614911daf883.

    Note: Cannot use relative revisions because they will become invalid
    when later revisions are added.

    TODO: It would be better to pull the revision numbers out of the
    migration script (.revision, .down_revision), but they are not (yet)
    part of the modelmeta package.
    """
    # Prepare database in post-migration schema
    engine, script = prepare_schema_from_migrations(
        uri_left, alembic_config_left, revision='12f290b63791')
    Session = sessionmaker(bind=engine)
    sesh = Session()

    # Insert minimal data needed to test migration: Several instances of
    # each of variable_aliases, grids, data_files, associated to a
    # data_file_variables. We can use current ORM for this.
    # All ids in record set i are equal to i; the post-downgrade
    # assertions below rely on that 1:1 association.
    num_test_records = 3
    for i in range(0, num_test_records):
        variable_alias = VariableAlias(
            id=i,
            long_name=name('var', i),
            standard_name=name('var', i),
            units='foo',
        )
        grid = Grid(
            id=i,
            xc_origin=0.0,
            xc_grid_step=1.0,
            xc_count=10,
            xc_units='xc_units',
            yc_origin=0.0,
            yc_grid_step=1.0,
            yc_count=10,
            yc_units='yc_units',
            evenly_spaced_y=True,
        )
        data_file = DataFile(
            id=i,
            filename=name('filename', i),
            first_1mib_md5sum='first_1mib_md5sum',
            unique_id=name('unique_id', i),
            x_dim_name='x_dim_name',
            y_dim_name='y_dim_name',
            index_time=datetime.datetime.now(),
        )
        data_file_variable = DataFileVariableGridded(
            id=i,
            file=data_file,
            variable_alias=variable_alias,
            grid=grid,
            netcdf_variable_name=name('var', i),
            range_min=0.0,
            range_max=10.0,
        )
        # Only the DFV is added explicitly; presumably the related objects
        # reach the session via relationship cascade — confirm against the
        # ORM relationship definitions.
        sesh.add(data_file_variable)
    sesh.commit()

    # Run downgrade migration
    command.downgrade(alembic_config_left, '-1')

    # Define minimal set of tables needed to test migration.
    # Reflect the downgraded schema directly; the current ORM no longer
    # matches it.
    meta_data = MetaData(bind=engine)
    data_file_variables = Table('data_file_variables', meta_data,
                                autoload=True)

    # Check data results of migration.
    results = list(engine.execute(
        data_file_variables.select()
    ))
    assert results is not None
    assert len(results) == num_test_records
    # The flattened rows keep the original 1:1 id associations.
    assert all(r['variable_alias_id'] == r['data_file_variable_id']
               for r in results)
    assert all(r['grid_id'] == r['data_file_variable_id'] for r in results)
    assert all(r['data_file_id'] == r['data_file_variable_id']
               for r in results)