Example #1
0
    def test_applies_new_migration_to_postgresql_database(self, fake_get):
        """ Reconnecting under a newer SCHEMA_VERSION applies pending migrations.

        NOTE(review): `fake_get` is a mock injected by a decorator outside
        this view — presumably it patches the migration-list lookup; confirm.
        """
        if self._db_type != 'postgres':
            self.skipTest('Postgres tests are disabled.')
        # replace real migrations with tests migrations.
        test_migrations = [
            (100, 'test.test_orm.functional.migrations.0100_init'),
            (101, 'test.test_orm.functional.migrations.0101_add_column'),
            (102, 'test.test_orm.functional.migrations.0102_create_table'),
            (103, 'test.test_orm.functional.migrations.0103_not_ready'
             )  # beyond the target SCHEMA_VERSION 102, so it must not apply
        ]

        fake_get.return_value = test_migrations

        # create postgresql db
        postgres_test_db_dsn = self.config.library.database
        # PostgreSQLTestBase._create_postgres_test_db(get_runconfig())['test_db_dsn']

        # populate database with initial schema as of version 100.
        with patch.object(database, 'SCHEMA_VERSION', 100):
            db = Database(postgres_test_db_dsn,
                          engine_kwargs={'poolclass': NullPool})
            db.create()
            db.close()

        # switch version and reconnect. Now both migrations (101, 102) should apply.
        with patch.object(database, 'SCHEMA_VERSION', 102):
            db = Database(postgres_test_db_dsn,
                          engine_kwargs={'poolclass': NullPool})
            try:
                # check column created by migration 101.
                db.connection\
                    .execute('SELECT column1 FROM {}.datasets;'.format(POSTGRES_SCHEMA_NAME))\
                    .fetchall()

                # check table created by migration 102.
                db.connection\
                    .execute('SELECT column1 FROM {}.table1;'.format(POSTGRES_SCHEMA_NAME))\
                    .fetchall()

                # stored db version changed to 102
                db_version = db.connection\
                    .execute('SELECT version FROM {}.user_version;'.format(POSTGRES_SCHEMA_NAME))\
                    .fetchone()[0]
                self.assertEqual(db_version, 102)
            finally:
                # always release the connection, even if an assertion failed.
                db.close()
    def test_applies_new_migration_to_postgresql_database(self, fake_get):
        """ Reconnecting with a higher SCHEMA_VERSION runs the pending migrations. """
        if self._db_type != 'postgres':
            self.skipTest('Postgres tests are disabled.')

        # Substitute a controlled migration list for the real one; 103 is past
        # the target version and must stay unapplied.
        fake_get.return_value = [
            (100, 'test.test_orm.functional.migrations.0100_init'),
            (101, 'test.test_orm.functional.migrations.0101_add_column'),
            (102, 'test.test_orm.functional.migrations.0102_create_table'),
            (103, 'test.test_orm.functional.migrations.0103_not_ready')
        ]

        dsn = self.config.library.database

        # Build the initial schema as of version 100, then disconnect.
        with patch.object(database, 'SCHEMA_VERSION', 100):
            initial_db = Database(dsn, engine_kwargs={'poolclass': NullPool})
            initial_db.create()
            initial_db.close()

        # Reconnect at version 102; migrations 101 and 102 should both run.
        with patch.object(database, 'SCHEMA_VERSION', 102):
            migrated_db = Database(dsn, engine_kwargs={'poolclass': NullPool})
            try:
                # Migration 101 added column1 to the datasets table.
                migrated_db.connection.execute(
                    'SELECT column1 FROM {}.datasets;'.format(POSTGRES_SCHEMA_NAME)).fetchall()

                # Migration 102 created table1.
                migrated_db.connection.execute(
                    'SELECT column1 FROM {}.table1;'.format(POSTGRES_SCHEMA_NAME)).fetchall()

                # The stored schema version advanced to 102.
                version_row = migrated_db.connection.execute(
                    'SELECT version FROM {}.user_version;'.format(POSTGRES_SCHEMA_NAME)).fetchone()
                self.assertEqual(version_row[0], 102)
            finally:
                migrated_db.close()
Example #3
0
class ConvertSchemaTest(unittest.TestCase):
    """ tests _convert_schema function. """

    def setUp(self):
        # In-memory SQLite keeps each test isolated and fast.
        self.sqlite_db = Database('sqlite://')
        self.sqlite_db.create()

    # NOTE(review): the leading underscore means unittest never collects this
    # test, and its body exercises _convert_partition rather than
    # _convert_schema as the class docstring claims — it looks deliberately
    # disabled; confirm intent before re-enabling or renaming.
    def _test_converts_schema_to_resource_dict(self):
        DatasetFactory._meta.sqlalchemy_session = self.sqlite_db.session
        FileFactory._meta.sqlalchemy_session = self.sqlite_db.session

        ds1 = DatasetFactory()
        partition1 = PartitionFactory(dataset=ds1)
        self.sqlite_db.commit()
        # replace the datafile so no real file access is attempted.
        partition1._datafile = MagicMock()
        ret = _convert_partition(partition1)
        self.assertIn('package_id', ret)
        self.assertEqual(ret['package_id'], ds1.vid)
        self.assertEqual(ret['name'], partition1.name)
Example #4
0
class ConvertPartitionTest(unittest.TestCase):
    """ Tests _convert_partition function. """

    def setUp(self):
        self.sqlite_db = Database('sqlite://')
        self.sqlite_db.create()

    def test_converts_partition_to_resource_dict(self):
        # A spec'd mock stands in for a real partition.
        partition_mock = MagicMock(spec=Partition)
        partition_mock.dataset.vid = 'ds1vid'
        partition_mock.name = 'partition1'

        result = _convert_partition(partition_mock)

        self.assertIn('package_id', result)
        self.assertEqual(result['package_id'], 'ds1vid')
        self.assertEqual(result['name'], partition_mock.name)

    def test_converts_partition_content_to_csv(self):
        # Partition mock yielding two rows with two columns each.
        partition_mock = MagicMock(spec=Partition)
        partition_mock.dataset.vid = 'ds1vid'
        partition_mock.datafile.headers = ['col1', 'col2']
        partition_mock.__iter__.side_effect = (
            lambda: iter([{'col1': '1', 'col2': '1'}, {'col1': '2', 'col2': '2'}]))

        result = _convert_partition(partition_mock)

        self.assertIn('package_id', result)
        self.assertEqual(result['package_id'], 'ds1vid')
        self.assertIn('upload', result)
        self.assertTrue(isinstance(result['upload'], six.StringIO))

        # The upload must be CSV: a header row followed by the data rows.
        rows = list(unicodecsv.reader(result['upload']))
        self.assertEqual(rows[0], ['col1', 'col2'])
        self.assertEqual(rows[1], ['1', '1'])
        self.assertEqual(rows[2], ['2', '2'])
Example #5
0
class ConvertDatasetTest(unittest.TestCase):
    """ Tests _convert_bundle function. """

    def setUp(self):
        self.sqlite_db = Database('sqlite://')
        self.sqlite_db.create()

    def test_converts_bundle_to_dict(self):
        DatasetFactory._meta.sqlalchemy_session = self.sqlite_db.session

        dataset = DatasetFactory()
        fake_bundle = _get_fake_bundle(dataset)
        self.sqlite_db.commit()

        converted = _convert_bundle(fake_bundle)

        # name carries the dataset identity.
        self.assertIn('name', converted)
        self.assertIsNotNone(converted['name'])
        self.assertEqual(converted['name'], dataset.vid)

        # title comes from the dataset metadata.
        self.assertIn('title', converted)
        self.assertIsNotNone(converted['title'])
        self.assertEqual(converted['title'], dataset.config.metadata.about.title)

        # Contact fields must always be present.
        for contact_key in ('author', 'author_email', 'maintainer', 'maintainer_email'):
            self.assertIn(contact_key, converted)

    def test_extends_notes_with_dataset_documentation(self):
        DatasetFactory._meta.sqlalchemy_session = self.sqlite_db.session
        FileFactory._meta.sqlalchemy_session = self.sqlite_db.session

        dataset = DatasetFactory()
        FileFactory(dataset=dataset, path='documentation.md', contents='### Dataset documentation.')
        self.sqlite_db.commit()

        converted = _convert_bundle(_get_fake_bundle(dataset))

        # The documentation file content ends up in the notes field.
        self.assertIn('### Dataset documentation.', converted['notes'])
Example #6
0
 def test_creates_new_dataset(self):
     """ new_dataset() stores and returns a dataset with the given vid.

     Fix: the original used assertTrue(ds.vid, 'd111'), which treats 'd111'
     as the failure *message* and passes for any truthy vid; the intended
     equality check is assertEqual.
     """
     db = Database('sqlite://')
     db.create()
     ds = db.new_dataset(vid='d111', source='source', dataset='dataset')
     self.assertEqual(ds.vid, 'd111')
Example #7
0
class ExportTest(unittest.TestCase):
    """ Tests export(bundle) function. """

    def setUp(self):
        if not CKAN_CONFIG:
            raise EnvironmentError(MISSING_CREDENTIALS_MSG)
        self.sqlite_db = Database('sqlite://')
        self.sqlite_db.create()

    @patch('ambry.exporters.ckan.core.ckanapi.RemoteCKAN.call_action')
    def test_creates_package_for_given_dataset(self, fake_call):
        DatasetFactory._meta.sqlalchemy_session = self.sqlite_db.session
        ds1 = DatasetFactory()
        ds1.config.metadata.about.access = 'public'
        bundle = _get_fake_bundle(ds1)
        export(bundle)

        # assert call to service was valid.
        called = False
        for call in fake_call.mock_calls:
            _, args, kwargs = call
            if (args[0] == 'package_create'
                    and kwargs['data_dict'].get('name') == ds1.vid):
                called = True
        self.assertTrue(called)

    @patch('ambry.exporters.ckan.core.ckanapi.RemoteCKAN.call_action')
    @patch('ambry.exporters.ckan.core._convert_partition')
    def test_creates_resource_for_each_partition_of_the_bundle(self, fake_convert, fake_call):
        DatasetFactory._meta.sqlalchemy_session = self.sqlite_db.session
        PartitionFactory._meta.sqlalchemy_session = self.sqlite_db.session

        ds1 = DatasetFactory()
        ds1.config.metadata.about.access = 'public'
        p1 = PartitionFactory(dataset=ds1)
        bundle = _get_fake_bundle(ds1, partitions=[p1])
        fake_convert.return_value = {'name': p1.name, 'package_id': ds1.vid}
        export(bundle)

        # assert call to service was valid.
        called = False
        for call in fake_call.mock_calls:
            _, args, kwargs = call
            if (args[0] == 'resource_create'
                    and kwargs['data_dict'].get('name') == p1.name
                    and kwargs['data_dict'].get('package_id') == ds1.vid):
                called = True
        self.assertTrue(called)

    @patch('ambry.exporters.ckan.core.ckanapi.RemoteCKAN.call_action')
    @patch('ambry.exporters.ckan.core._convert_schema')
    def test_creates_resource_for_schema(self, fake_convert, fake_call):
        DatasetFactory._meta.sqlalchemy_session = self.sqlite_db.session

        ds1 = DatasetFactory()
        ds1.config.metadata.about.access = 'public'
        bundle = _get_fake_bundle(ds1)
        fake_convert.return_value = {'name': 'schema', 'package_id': ds1.vid}
        export(bundle)

        # assert call to service was valid.
        called = False
        for call in fake_call.mock_calls:
            _, args, kwargs = call
            if (args[0] == 'resource_create'
                    and kwargs['data_dict'].get('name') == 'schema'
                    and kwargs['data_dict'].get('package_id') == ds1.vid):
                called = True
        self.assertTrue(called)

    @patch('ambry.exporters.ckan.core.ckanapi.RemoteCKAN.call_action')
    def test_creates_resource_for_each_external_documentation(self, fake_call):
        DatasetFactory._meta.sqlalchemy_session = self.sqlite_db.session

        ds1 = DatasetFactory()
        ds1.config.metadata.about.access = 'public'

        # create two external documentations.
        #
        site1_descr = 'Descr1'
        site1_url = 'http://example.com/1'
        site2_descr = 'Descr2'
        site2_url = 'http://example.com/2'

        ds1.config.metadata.external_documentation.site1.description = site1_descr
        ds1.config.metadata.external_documentation.site1.url = site1_url

        ds1.config.metadata.external_documentation.site2.description = site2_descr
        ds1.config.metadata.external_documentation.site2.url = site2_url

        bundle = _get_fake_bundle(ds1)
        export(bundle)

        # assert call was valid
        resource_create_calls = {}
        for call in fake_call.mock_calls:
            _, args, kwargs = call
            if args[0] == 'resource_create':
                resource_create_calls[kwargs['data_dict']['name']] = kwargs['data_dict']
        self.assertIn('site1', resource_create_calls)
        self.assertEqual(resource_create_calls['site1']['url'], site1_url)
        self.assertEqual(resource_create_calls['site1']['description'], site1_descr)

        self.assertIn('site2', resource_create_calls)
        self.assertEqual(resource_create_calls['site2']['url'], site2_url)
        self.assertEqual(resource_create_calls['site2']['description'], site2_descr)

    @patch('ambry.exporters.ckan.core.ckanapi.RemoteCKAN.call_action')
    def test_raises_UnpublishedAccessError_error(self, fake_call):
        DatasetFactory._meta.sqlalchemy_session = self.sqlite_db.session

        ds1 = DatasetFactory()
        ds1.config.metadata.about.access = 'restricted'
        bundle = _get_fake_bundle(ds1)
        with self.assertRaises(UnpublishedAccessError):
            export(bundle)
Example #8
0
 def test_creates_new_dataset(self):
     """ new_dataset() stores and returns a dataset with the given vid.

     Fix: the original used assertTrue(ds.vid, 'd111'), which treats 'd111'
     as the failure *message* and passes for any truthy vid; the intended
     equality check is assertEqual.
     """
     db = Database('sqlite://')
     db.create()
     ds = db.new_dataset(vid='d111', source='source', dataset='dataset')
     self.assertEqual(ds.vid, 'd111')