def test_commits_session(self):
    """Database.commit() delegates to the underlying session's commit()."""
    db = Database('sqlite://')
    # Touching the property initializes the engine and session.
    db.session
    fake_commit = Mock()
    with patch.object(db._session, 'commit', fake_commit):
        db.commit()
        fake_commit.assert_called_once_with()
def test_rollbacks_session(self):
    """Database.rollback() delegates to the underlying session's rollback()."""
    db = Database('sqlite://')
    # Touching the property initializes the engine and session.
    db.session
    fake_rollback = Mock()
    with patch.object(db._session, 'rollback', fake_rollback):
        db.rollback()
        fake_rollback.assert_called_once_with()
def test_raises_session_commit_exception(self):
    """A ValueError raised by session.commit() propagates out of db.commit()."""
    db = Database('sqlite://')
    # Touching the property initializes the engine and session.
    db.session
    failing_commit = Mock(side_effect=ValueError)
    with patch.object(db._session, 'commit', failing_commit):
        with self.assertRaises(ValueError):
            db.commit()
def test_raises_exception_if_dir_does_not_exist_after_creation_attempt(self):
    """_create_path must raise when the directory is still missing after
    os.makedirs was attempted.

    Bug fix: the original wrapped the call in try/except and only checked
    the message inside the except clause, so the test passed silently when
    no exception was raised at all.  assertRaises makes the expectation
    explicit and fails when nothing is raised.
    """
    # os.path.exists always False -> directory "never" gets created.
    with patch.object(os.path, 'exists', Mock(return_value=False)):
        with patch.object(os, 'makedirs', Mock()) as fake_makedirs:
            with self.assertRaises(Exception) as cm:
                db = Database('sqlite:///test_database1.db')
                db._create_path()
            self.assertIn('Couldn\'t create directory', str(cm.exception))
            fake_makedirs.assert_called_once_with('/')
def test_closes_session_and_connection(self):
    """close() closes both session and connection and drops the cached refs."""
    db = Database('sqlite://')
    session_close = Mock()
    connection_close = Mock()
    with patch.object(db.session, 'close', session_close):
        with patch.object(db.connection, 'close', connection_close):
            db.close()
            # Both underlying close() hooks fired exactly once...
            session_close.assert_called_once_with()
            connection_close.assert_called_once_with()
            # ...and the cached objects were discarded.
            self.assertIsNone(db._session)
            self.assertIsNone(db._connection)
def test_raises_exception_if_dir_does_not_exist_after_creation_attempt(
        self):
    """_create_path must raise when the directory is still missing after
    os.makedirs was attempted.

    Bug fix: the original wrapped the call in try/except and only checked
    the message inside the except clause, so the test passed silently when
    no exception was raised at all.  assertRaises makes the expectation
    explicit and fails when nothing is raised.
    """
    # os.path.exists always False -> directory "never" gets created.
    with patch.object(os.path, 'exists', Mock(return_value=False)):
        with patch.object(os, 'makedirs', Mock()) as fake_makedirs:
            with self.assertRaises(Exception) as cm:
                db = Database('sqlite:///test_database1.db')
                db._create_path()
            self.assertIn('Couldn\'t create directory', str(cm.exception))
            fake_makedirs.assert_called_once_with('/')
def test_removes_all_datasets(self):
    """clean() calls remove_dataset once per dataset in the database."""
    db = Database('sqlite://')
    mock_datasets = []
    for ds_name in ('ds1', 'ds2', 'ds3'):
        dataset = Mock(spec=Dataset)
        dataset.name = ds_name
        mock_datasets.append(dataset)
    datasets_prop = PropertyMock(return_value=mock_datasets)
    with patch.object(Database, 'datasets', datasets_prop):
        with patch.object(Database, 'remove_dataset') as fake_remove:
            db.clean()
            # One removal per mocked dataset.
            self.assertEqual(len(fake_remove.mock_calls), 3)
def test_applies_new_migration_to_sqlite_database(self, fake_get):
    """Pending migrations up to SCHEMA_VERSION run on reconnect (SQLite).

    fake_get patches the migrations lookup; migrations 101/102 must apply,
    103 must not because SCHEMA_VERSION is patched to 102.
    """
    # Skip unless the suite is configured for SQLite.
    if self._db_type != 'sqlite':
        self.skipTest('SQLite tests are disabled.')
    # replace real migrations with tests migrations.
    test_migrations = [
        (100, 'test.functional.migrations.0100_init'),
        (101, 'test.functional.migrations.0101_add_column'),
        (102, 'test.functional.migrations.0102_create_table'),
        (103, 'test.functional.migrations.0103_not_ready')  # that should not apply
    ]
    fake_get.return_value = test_migrations
    # create database with initial schema
    with patch.object(database, 'SCHEMA_VERSION', 100):
        db = Database('sqlite:///{}'.format(self.sqlite_db_file))
        db.create_tables()
        db.close()
    # switch version and reconnect. Now both migrations should apply.
    with patch.object(database, 'SCHEMA_VERSION', 102):
        db = Database('sqlite:///{}'.format(self.sqlite_db_file))
        try:
            # check column created by migration 101.
            db.connection.execute('SELECT column1 FROM datasets;').fetchall()
            # check table created by migration 102.
            db.connection.execute('SELECT column1 FROM table1;').fetchall()
            # db version changed to 102
            self.assertEqual(db.connection.execute('PRAGMA user_version').fetchone()[0], 102)
        finally:
            db.close()
def test_table_basic(self):
    """Basic operations on datasets: create a table, then add a column."""
    db = Database(self.dsn)
    db.open()
    dataset = db.new_dataset(vid=self.dn[0], source='source', dataset='dataset')
    dataset.new_table('table1')
    db.commit()
    # Re-fetch the table through the dataset to exercise the lookup path.
    table = db.dataset(dataset.vid).table('table1')
    table.add_column('col1', description='foobar')
    db.commit()
def test_applies_new_migration_to_postgresql_database(self, fake_get):
    """Pending migrations up to SCHEMA_VERSION run on reconnect (Postgres).

    fake_get patches the migrations lookup; migrations 101/102 must apply,
    103 must not because SCHEMA_VERSION is patched to 102.
    """
    # Skip unless the suite is configured for Postgres.
    if self._db_type != 'postgres':
        self.skipTest('Postgres tests are disabled.')
    # replace real migrations with tests migrations.
    test_migrations = [
        (100, 'test.test_orm.functional.migrations.0100_init'),
        (101, 'test.test_orm.functional.migrations.0101_add_column'),
        (102, 'test.test_orm.functional.migrations.0102_create_table'),
        (103, 'test.test_orm.functional.migrations.0103_not_ready')  # that should not apply
    ]
    fake_get.return_value = test_migrations
    # create postgresql db
    postgres_test_db_dsn = self.config.library.database
    # PostgreSQLTestBase._create_postgres_test_db(get_runconfig())['test_db_dsn']
    # populate database with initial schema
    # NullPool avoids lingering pooled connections between reconnects.
    with patch.object(database, 'SCHEMA_VERSION', 100):
        db = Database(postgres_test_db_dsn, engine_kwargs={'poolclass': NullPool})
        db.create()
        db.close()
    # switch version and reconnect. Now both migrations should apply.
    with patch.object(database, 'SCHEMA_VERSION', 102):
        db = Database(postgres_test_db_dsn, engine_kwargs={'poolclass': NullPool})
        try:
            # check column created by migration 101.
            db.connection\
                .execute('SELECT column1 FROM {}.datasets;'.format(POSTGRES_SCHEMA_NAME))\
                .fetchall()
            # check table created by migration 102.
            db.connection\
                .execute('SELECT column1 FROM {}.table1;'.format(POSTGRES_SCHEMA_NAME))\
                .fetchall()
            # db version changed to 102
            db_version = db.connection\
                .execute('SELECT version FROM {}.user_version;'.format(POSTGRES_SCHEMA_NAME))\
                .fetchone()[0]
            self.assertEqual(db_version, 102)
        finally:
            db.close()
class ConvertSchemaTest(unittest.TestCase):
    """ tests _convert_schema function. """

    def setUp(self):
        # Fresh in-memory SQLite database for each test.
        self.sqlite_db = Database('sqlite://')
        self.sqlite_db.create()

    # NOTE(review): the leading underscore disables this test; presumably
    # parked intentionally -- confirm before re-enabling.  Also note the
    # body exercises _convert_partition although the class docstring says
    # _convert_schema -- verify which function this was meant to cover.
    def _test_converts_schema_to_resource_dict(self):
        DatasetFactory._meta.sqlalchemy_session = self.sqlite_db.session
        FileFactory._meta.sqlalchemy_session = self.sqlite_db.session
        ds1 = DatasetFactory()
        partition1 = PartitionFactory(dataset=ds1)
        self.sqlite_db.commit()
        partition1._datafile = MagicMock()
        ret = _convert_partition(partition1)
        self.assertIn('package_id', ret)
        self.assertEqual(ret['package_id'], ds1.vid)
        self.assertEqual(ret['name'], partition1.name)
def test_ignores_exception_if_makedirs_failed(self):
    """An error from os.makedirs is swallowed when the directory exists afterwards."""
    seen_paths = []

    def exists_second_time(path):
        # False on the first call for a given path, True afterwards.
        if path in seen_paths:
            return True
        seen_paths.append(path)
        return False

    failing_makedirs = Mock(side_effect=Exception('Fake exception'))
    with patch.object(os, 'makedirs', failing_makedirs):
        with patch.object(os.path, 'exists', Mock(side_effect=exists_second_time)):
            db = Database('sqlite:///test_database1.db')
            db._create_path()
            failing_makedirs.assert_called_once_with('/')
def test_makes_database_directory(self):
    """_create_path creates the database directory when it is missing."""
    checked_paths = []

    def exists_after_first_check(path):
        # False on the first call for a given path, True afterwards.
        if path in checked_paths:
            return True
        checked_paths.append(path)
        return False

    with patch.object(os, 'makedirs', Mock()) as fake_makedirs:
        with patch.object(os.path, 'exists', Mock(side_effect=exists_after_first_check)):
            db = Database('sqlite:///test_database1.db')
            db._create_path()
            # Directory creation was attempted exactly once.
            fake_makedirs.assert_called_once_with('/')
def test_ignores_exception_if_makedirs_failed(self):
    """An error from os.makedirs is swallowed when the directory exists afterwards."""
    observed = []

    def exists_on_retry(path):
        # False on the first call for a given path, True afterwards.
        if path in observed:
            return True
        observed.append(path)
        return False

    broken_makedirs = Mock(side_effect=Exception('Fake exception'))
    with patch.object(os, 'makedirs', broken_makedirs):
        with patch.object(os.path, 'exists', Mock(side_effect=exists_on_retry)):
            db = Database('sqlite:///test_database1.db')
            db._create_path()
            broken_makedirs.assert_called_once_with('/')
def test_applies_new_migration_to_sqlite_database(self, fake_get):
    """Pending migrations up to SCHEMA_VERSION run on reconnect (SQLite).

    fake_get patches the migrations lookup; migrations 101/102 must apply,
    103 must not because SCHEMA_VERSION is patched to 102.
    """
    # Skip unless the suite is configured for SQLite.
    if self._db_type != 'sqlite':
        self.skipTest('SQLite tests are disabled.')
    # replace real migrations with tests migrations.
    test_migrations = [
        (100, 'test.functional.migrations.0100_init'),
        (101, 'test.functional.migrations.0101_add_column'),
        (102, 'test.functional.migrations.0102_create_table'),
        (103, 'test.functional.migrations.0103_not_ready')  # that should not apply
    ]
    fake_get.return_value = test_migrations
    # create database with initial schema
    with patch.object(database, 'SCHEMA_VERSION', 100):
        db = Database('sqlite:///{}'.format(self.sqlite_db_file))
        db.create_tables()
        db.close()
    # switch version and reconnect. Now both migrations should apply.
    with patch.object(database, 'SCHEMA_VERSION', 102):
        db = Database('sqlite:///{}'.format(self.sqlite_db_file))
        try:
            # check column created by migration 101.
            db.connection.execute(
                'SELECT column1 FROM datasets;').fetchall()
            # check table created by migration 102.
            db.connection.execute('SELECT column1 FROM table1;').fetchall()
            # db version changed to 102
            self.assertEqual(
                db.connection.execute('PRAGMA user_version').fetchone()[0],
                102)
        finally:
            db.close()
class ConvertPartitionTest(unittest.TestCase):
    """Tests for _convert_partition()."""

    def setUp(self):
        # Fresh in-memory SQLite database for each test.
        self.sqlite_db = Database('sqlite://')
        self.sqlite_db.create()

    def test_converts_partition_to_resource_dict(self):
        # A mocked partition suffices: only dataset.vid and name are read.
        fake_partition = MagicMock(spec=Partition)
        fake_partition.dataset.vid = 'ds1vid'
        fake_partition.name = 'partition1'
        ret = _convert_partition(fake_partition)
        self.assertIn('package_id', ret)
        self.assertEqual(ret['package_id'], 'ds1vid')
        self.assertEqual(ret['name'], fake_partition.name)

    def test_converts_partition_content_to_csv(self):
        # prepare partition mock
        fake_partition = MagicMock(spec=Partition)
        fake_partition.dataset.vid = 'ds1vid'
        fake_partition.datafile.headers = ['col1', 'col2']
        # side_effect gives a fresh iterator for every iteration attempt.
        fake_iter = lambda: iter([{'col1': '1', 'col2': '1'}, {'col1': '2', 'col2': '2'}])
        fake_partition.__iter__.side_effect = fake_iter
        # run.
        ret = _convert_partition(fake_partition)
        # check converted partition.
        self.assertIn('package_id', ret)
        self.assertEqual(ret['package_id'], 'ds1vid')
        self.assertIn('upload', ret)
        self.assertTrue(isinstance(ret['upload'], six.StringIO))
        # The upload must be a CSV with a header row followed by the rows.
        rows = []
        reader = unicodecsv.reader(ret['upload'])
        for row in reader:
            rows.append(row)
        self.assertEqual(rows[0], ['col1', 'col2'])
        self.assertEqual(rows[1], ['1', '1'])
        self.assertEqual(rows[2], ['2', '2'])
def test_a_lot_of_tables(self):
    """Creating 100 tables with 10 columns each works and stays consistent."""
    from contexttimer import Timer
    db = Database(self.dsn)
    db.open()
    ds = db.new_dataset(vid=self.dn[0], source='source', dataset='dataset')
    with Timer() as timer:
        for table_no in range(100):
            table = ds.new_table('table' + str(table_no))
            for col_no in range(10):
                table.add_column('col' + str(col_no), datatype='integer')
        ds.commit()
    # Rough throughput report for manual inspection of test runs.
    print(len(ds.tables), len(ds.tables) / timer.elapsed)
    self.assertEqual(100, len(ds.tables))
    for table in ds.tables:
        # 10 + id column
        self.assertEqual(11, len(table.columns))
def test_applies_new_migration_to_postgresql_database(self, fake_get):
    """Pending migrations up to SCHEMA_VERSION run on reconnect (Postgres).

    fake_get patches the migrations lookup; migrations 101/102 must apply,
    103 must not because SCHEMA_VERSION is patched to 102.
    """
    # Skip unless the suite is configured for Postgres.
    if self._db_type != 'postgres':
        self.skipTest('Postgres tests are disabled.')
    # replace real migrations with tests migrations.
    test_migrations = [
        (100, 'test.test_orm.functional.migrations.0100_init'),
        (101, 'test.test_orm.functional.migrations.0101_add_column'),
        (102, 'test.test_orm.functional.migrations.0102_create_table'),
        (103, 'test.test_orm.functional.migrations.0103_not_ready')  # that should not apply
    ]
    fake_get.return_value = test_migrations
    # create postgresql db
    postgres_test_db_dsn = self.config.library.database
    # PostgreSQLTestBase._create_postgres_test_db(get_runconfig())['test_db_dsn']
    # populate database with initial schema
    # NullPool avoids lingering pooled connections between reconnects.
    with patch.object(database, 'SCHEMA_VERSION', 100):
        db = Database(postgres_test_db_dsn, engine_kwargs={'poolclass': NullPool})
        db.create()
        db.close()
    # switch version and reconnect. Now both migrations should apply.
    with patch.object(database, 'SCHEMA_VERSION', 102):
        db = Database(postgres_test_db_dsn, engine_kwargs={'poolclass': NullPool})
        try:
            # check column created by migration 101.
            db.connection\
                .execute('SELECT column1 FROM {}.datasets;'.format(POSTGRES_SCHEMA_NAME))\
                .fetchall()
            # check table created by migration 102.
            db.connection\
                .execute('SELECT column1 FROM {}.table1;'.format(POSTGRES_SCHEMA_NAME))\
                .fetchall()
            # db version changed to 102
            db_version = db.connection\
                .execute('SELECT version FROM {}.user_version;'.format(POSTGRES_SCHEMA_NAME))\
                .fetchone()[0]
            self.assertEqual(db_version, 102)
        finally:
            db.close()
def test_creates_new_root_config(self):
    """_add_config_root inserts exactly one root-config dataset."""
    db = Database('sqlite://')
    db.enable_delete = True
    # Stub _add_config_root so create_tables() cannot add the root for us.
    with patch.object(db, '_add_config_root', Mock()):
        db.create_tables()
        query = db.session.query
        self.assertEqual(len(query(Dataset).all()), 0)
    # Now call the real implementation and verify the root dataset appears.
    db._add_config_root()
    datasets = query(Dataset).all()
    self.assertEqual(len(datasets), 1)
    self.assertEqual(datasets[0].name, ROOT_CONFIG_NAME)
    self.assertEqual(datasets[0].vname, ROOT_CONFIG_NAME_V)
class ConvertDatasetTest(unittest.TestCase):
    """Tests for _convert_bundle()."""

    def setUp(self):
        # Fresh in-memory SQLite database for each test.
        self.sqlite_db = Database('sqlite://')
        self.sqlite_db.create()

    def test_converts_bundle_to_dict(self):
        DatasetFactory._meta.sqlalchemy_session = self.sqlite_db.session
        ds1 = DatasetFactory()
        bundle = _get_fake_bundle(ds1)
        self.sqlite_db.commit()
        ret = _convert_bundle(bundle)
        # 'name' carries the dataset vid.
        self.assertIn('name', ret)
        self.assertIsNotNone(ret['name'])
        self.assertEqual(ret['name'], ds1.vid)
        # 'title' comes from the dataset metadata.
        self.assertIn('title', ret)
        self.assertIsNotNone(ret['title'])
        self.assertEqual(ret['title'], ds1.config.metadata.about.title)
        # Contact fields must be present (values may be empty).
        self.assertIn('author', ret)
        self.assertIn('author_email', ret)
        self.assertIn('maintainer', ret)
        self.assertIn('maintainer_email', ret)

    def test_extends_notes_with_dataset_documentation(self):
        DatasetFactory._meta.sqlalchemy_session = self.sqlite_db.session
        FileFactory._meta.sqlalchemy_session = self.sqlite_db.session
        ds1 = DatasetFactory()
        # Attach a documentation file; its contents should land in 'notes'.
        FileFactory(
            dataset=ds1, path='documentation.md',
            contents='### Dataset documentation.')
        self.sqlite_db.commit()
        bundle = _get_fake_bundle(ds1)
        ret = _convert_bundle(bundle)
        self.assertIn('### Dataset documentation.', ret['notes'])
def test_clone_returns_new_instance(self):
    """clone() returns a distinct Database object with the same DSN.

    Bug fix: the original used assertNotEqual, which tests value
    equality; the test name claims a *new instance*, so identity is the
    right check -- assertIsNot.
    """
    db = Database('sqlite://')
    new_db = db.clone()
    self.assertIsNot(db, new_db)
    self.assertEqual(db.dsn, new_db.dsn)
def test_sqlite_database_does_not_exists_if_file_not_found(self):
    """exists() is False when the backing sqlite file is absent."""
    # NOTE(review): sqlite DSNs for relative paths usually use three
    # slashes ('sqlite:///...'); confirm the two-slash form is intended.
    missing_db = Database('sqlite://no-such-file.db')
    self.assertFalse(missing_db.exists())
def test_engine_creates_and_caches_sqlalchemy_engine(self, fake_validate):
    """First access to .engine builds an Engine and caches it on _engine."""
    db = Database('sqlite://')
    # Property access creates the engine...
    self.assertIsInstance(db.engine, Engine)
    # ...and caches it on the private attribute.
    self.assertIsInstance(db._engine, Engine)
    # The (patched) validation hook fired exactly once despite two accesses.
    self.assertEqual(len(fake_validate.mock_calls), 1)
def test_creates_and_caches_new_sqlalchemy_connection(self):
    """First access to .connection opens a connection and caches it."""
    db = Database('sqlite://')
    # Property access opens the connection...
    self.assertIsInstance(db.connection, SQLAlchemyConnection)
    # ...and caches it on the private attribute.
    self.assertIsInstance(db._connection, SQLAlchemyConnection)
def test_contains_sqlalchemy_session(self):
    """The .session property yields a SQLAlchemy Session."""
    db = Database('sqlite://')
    self.assertIsInstance(db.session, Session)
def test_creates_session(self):
    """open() initializes the cached session."""
    db = Database('sqlite://')
    db.open()
    self.assertIsNotNone(db._session)
class ExportTest(unittest.TestCase):
    """ Tests export(bundle) function. """

    def setUp(self):
        # These tests need live CKAN credentials; fail fast when absent.
        if not CKAN_CONFIG:
            raise EnvironmentError(MISSING_CREDENTIALS_MSG)
        self.sqlite_db = Database('sqlite://')
        self.sqlite_db.create()

    @patch('ambry.exporters.ckan.core.ckanapi.RemoteCKAN.call_action')
    def test_creates_package_for_given_dataset(self, fake_call):
        DatasetFactory._meta.sqlalchemy_session = self.sqlite_db.session
        ds1 = DatasetFactory()
        ds1.config.metadata.about.access = 'public'
        bundle = _get_fake_bundle(ds1)
        export(bundle)
        # assert call to service was valid.
        # Scan the recorded CKAN API calls for the expected package_create.
        called = False
        for call in fake_call.mock_calls:
            _, args, kwargs = call
            if (args[0] == 'package_create'
                    and kwargs['data_dict'].get('name') == ds1.vid):
                called = True
        self.assertTrue(called)

    @patch('ambry.exporters.ckan.core.ckanapi.RemoteCKAN.call_action')
    @patch('ambry.exporters.ckan.core._convert_partition')
    def test_creates_resource_for_each_partition_of_the_bundle(self, fake_convert, fake_call):
        DatasetFactory._meta.sqlalchemy_session = self.sqlite_db.session
        PartitionFactory._meta.sqlalchemy_session = self.sqlite_db.session
        ds1 = DatasetFactory()
        ds1.config.metadata.about.access = 'public'
        p1 = PartitionFactory(dataset=ds1)
        bundle = _get_fake_bundle(ds1, partitions=[p1])
        # Bypass real partition conversion; the call routing is under test.
        fake_convert.return_value = {'name': p1.name, 'package_id': ds1.vid}
        export(bundle)
        # assert call to service was valid.
        called = False
        for call in fake_call.mock_calls:
            _, args, kwargs = call
            if (args[0] == 'resource_create'
                    and kwargs['data_dict'].get('name') == p1.name
                    and kwargs['data_dict'].get('package_id') == ds1.vid):
                called = True
        self.assertTrue(called)

    @patch('ambry.exporters.ckan.core.ckanapi.RemoteCKAN.call_action')
    @patch('ambry.exporters.ckan.core._convert_schema')
    def test_creates_resource_for_schema(self, fake_convert, fake_call):
        DatasetFactory._meta.sqlalchemy_session = self.sqlite_db.session
        ds1 = DatasetFactory()
        ds1.config.metadata.about.access = 'public'
        bundle = _get_fake_bundle(ds1)
        # Bypass real schema conversion; the call routing is under test.
        fake_convert.return_value = {'name': 'schema', 'package_id': ds1.vid}
        export(bundle)
        # assert call to service was valid.
        called = False
        for call in fake_call.mock_calls:
            _, args, kwargs = call
            if (args[0] == 'resource_create'
                    and kwargs['data_dict'].get('name') == 'schema'
                    and kwargs['data_dict'].get('package_id') == ds1.vid):
                called = True
        self.assertTrue(called)

    @patch('ambry.exporters.ckan.core.ckanapi.RemoteCKAN.call_action')
    def test_creates_resource_for_each_external_documentation(self, fake_call):
        DatasetFactory._meta.sqlalchemy_session = self.sqlite_db.session
        ds1 = DatasetFactory()
        ds1.config.metadata.about.access = 'public'
        # create two external documentations.
        site1_descr = 'Descr1'
        site1_url = 'http://example.com/1'
        site2_descr = 'Descr2'
        site2_url = 'http://example.com/2'
        ds1.config.metadata.external_documentation.site1.description = site1_descr
        ds1.config.metadata.external_documentation.site1.url = site1_url
        ds1.config.metadata.external_documentation.site2.description = site2_descr
        ds1.config.metadata.external_documentation.site2.url = site2_url
        bundle = _get_fake_bundle(ds1)
        export(bundle)
        # assert call was valid
        # Index resource_create payloads by resource name for lookup below.
        resource_create_calls = {}
        for call in fake_call.mock_calls:
            _, args, kwargs = call
            if args[0] == 'resource_create':
                resource_create_calls[kwargs['data_dict']['name']] = kwargs['data_dict']
        self.assertIn('site1', resource_create_calls)
        self.assertEqual(resource_create_calls['site1']['url'], site1_url)
        self.assertEqual(resource_create_calls['site1']['description'], site1_descr)
        self.assertIn('site2', resource_create_calls)
        self.assertEqual(resource_create_calls['site2']['url'], site2_url)
        self.assertEqual(resource_create_calls['site2']['description'], site2_descr)

    @patch('ambry.exporters.ckan.core.ckanapi.RemoteCKAN.call_action')
    def test_raises_UnpublishedAccessError_error(self, fake_call):
        # Non-public access must block the export entirely.
        DatasetFactory._meta.sqlalchemy_session = self.sqlite_db.session
        ds1 = DatasetFactory()
        ds1.config.metadata.about.access = 'restricted'
        bundle = _get_fake_bundle(ds1)
        with self.assertRaises(UnpublishedAccessError):
            export(bundle)
def test_creates_new_dataset(self):
    """new_dataset() persists a dataset carrying the requested vid.

    Bug fix: the original used assertTrue(ds.vid, 'd111'), which treats
    'd111' as the failure *message* and only checks truthiness of ds.vid;
    assertEqual performs the intended comparison.
    """
    db = Database('sqlite://')
    db.create()
    ds = db.new_dataset(vid='d111', source='source', dataset='dataset')
    self.assertEqual(ds.vid, 'd111')
def test_ignores_OperationalError_while_droping(self):
    """create_tables() proceeds even when drop() raises OperationalError."""
    db = Database('sqlite://')
    failing_drop = Mock(side_effect=OperationalError('select 1;', [], 'a'))
    with patch.object(db, 'drop', failing_drop):
        db.create_tables()
    # drop() was attempted exactly once and its failure was swallowed.
    failing_drop.assert_called_once_with()
def setUp(self):
    # Fresh in-memory SQLite database for each test.
    self.sqlite_db = Database('sqlite://')
    self.sqlite_db.create()
def setUp(self):
    # These tests need live CKAN credentials; fail fast when absent.
    if not CKAN_CONFIG:
        raise EnvironmentError(MISSING_CREDENTIALS_MSG)
    # Fresh in-memory SQLite database for each test.
    self.sqlite_db = Database('sqlite://')
    self.sqlite_db.create()
def test_initializes_path_and_driver(self):
    """Constructor parses path and driver out of a Postgres DSN."""
    dsn = 'postgresql+psycopg2://ambry:[email protected]/exampledb'
    database_under_test = Database(dsn)
    self.assertEqual(database_under_test.path, '/exampledb')
    self.assertEqual(database_under_test.driver, 'postgres')
def test_contains_sqlachemy_metadata(self):
    """Database.metadata reports itself as bound."""
    # NOTE(review): "sqlachemy" in the method name looks like a typo for
    # "sqlalchemy"; kept as-is because renaming changes the test id.
    db = Database('sqlite://')
    self.assertTrue(db.metadata.is_bound())
def test_contains_engine_inspector(self):
    """Database.inspector is an Inspector bound to the database's engine."""
    db = Database('sqlite://')
    inspector = db.inspector
    self.assertIsInstance(inspector, Inspector)
    self.assertEqual(db.engine, inspector.engine)