def test_db(self):
    config = {
        'archive': {
            'compression': {
                'enabled': True
            },
            'storage_driver': {
                'name': 'db2',
                'config': {}
            }
        }
    }

    archive.initialize(config, force=True)
    self.run_test()
def archive_data_upgrade_005_006():
    """
    Upgrade the document archive data schema and move the data appropriately.

    Assumes the necessary tables are in place (archive_document, archive_document_reference, object_storage).

    :return:
    """
    from anchore_engine.db import ArchiveDocument, session_scope, ArchiveMetadata
    from anchore_engine.subsys import archive
    from anchore_engine.subsys.archive import operations
    from anchore_engine.configuration import localconfig

    config = localconfig.get_config()
    archive.initialize(config.get('services', {}).get('catalog', {}))
    client = operations.get_archive().primary_client

    session_counter = 0
    max_pending_session_size = 10000

    with session_scope() as db_session:
        for doc in db_session.query(ArchiveDocument.userId,
                                    ArchiveDocument.bucket,
                                    ArchiveDocument.archiveId,
                                    ArchiveDocument.documentName,
                                    ArchiveDocument.created_at,
                                    ArchiveDocument.last_updated,
                                    ArchiveDocument.record_state_key,
                                    ArchiveDocument.record_state_val):
            # Build a metadata record pointing at the content location managed by the primary storage client
            meta = ArchiveMetadata(userId=doc[0],
                                   bucket=doc[1],
                                   archiveId=doc[2],
                                   documentName=doc[3],
                                   is_compressed=False,
                                   document_metadata=None,
                                   content_url=client.uri_for(userId=doc[0], bucket=doc[1], key=doc[2]),
                                   created_at=doc[4],
                                   last_updated=doc[5],
                                   record_state_key=doc[6],
                                   record_state_val=doc[7])
            db_session.add(meta)

            # Flush periodically to bound the number of pending objects held in the session
            session_counter += 1
            if session_counter >= max_pending_session_size:
                db_session.flush()
                session_counter = 0
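# Hedged sketch, not part of the original upgrade code: a hypothetical helper that
# sanity-checks the 005->006 data move by comparing row counts between the legacy
# archive_document table and the new archive metadata table. The helper name and the
# count comparison are assumptions for illustration only.
def _verify_archive_upgrade_005_006():
    from anchore_engine.db import ArchiveDocument, ArchiveMetadata, session_scope

    with session_scope() as db_session:
        legacy_count = db_session.query(ArchiveDocument).count()
        meta_count = db_session.query(ArchiveMetadata).count()
        # Every legacy document should have produced a corresponding metadata record
        assert meta_count >= legacy_count, 'Expected a metadata record for each legacy archive document'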
def test_legacy_db(self):
    # NOTE: the legacy db driver does not support compression since it uses a string type
    # instead of a binary type for content storage
    config = {
        'archive': {
            'compression': {
                'enabled': False
            },
            'storage_driver': {
                'name': 'db',
                'config': {}
            }
        }
    }

    archive.initialize(config, force=True)
    self.run_test()
def test_fs(self):
    config = {
        'archive': {
            'compression': {
                'enabled': True
            },
            'storage_driver': {
                'name': 'localfs',
                'config': {
                    'archive_data_dir': '/tmp/archive_test/fs_driver'
                }
            }
        }
    }

    archive.initialize(config, force=True)
    self.run_test()
def test_s3_auto(self):
    config = {
        'archive': {
            'compression': {
                'enabled': False
            },
            'storage_driver': {
                'name': 's3',
                'config': {
                    'iamauto': True,
                    'bucket': 'testarchivebucket_does_not_exist'
                }
            }
        }
    }

    with self.assertRaises(DriverConfigurationError) as err:
        archive.initialize(config, force=True)

    print('Got expected error: {}'.format(err.exception.message))
def test_s3_bad_creds(self):
    config = {
        'archive': {
            'compression': {
                'enabled': False
            },
            'storage_driver': {
                'name': 's3',
                'config': {
                    'access_key': test_key,
                    'secret_key': 'notrealkey',
                    'url': 'http://localhost:9000',
                    'region': None,
                    'bucket': 'testarchivebucket'
                }
            }
        }
    }

    with self.assertRaises(BadCredentialsError) as err:
        archive.initialize(config, force=True)

    print('Got expected error: {}'.format(err.exception.message))
def run_test(self, src_client_config, dest_client_config):
    """
    Common test path for all configs to test against

    :return:
    """
    print('Running migration test from {} to {}'.format(src_client_config['name'], dest_client_config['name']))

    archive.initialize({
        'services': {
            'catalog': {
                'archive': {
                    'compression': {
                        'enabled': False
                    },
                    'storage_driver': src_client_config
                }
            }
        }
    })
    self.add_data()

    src_config = {
        'storage_driver': src_client_config,
        'compression': {
            'enabled': False
        }
    }
    dest_config = {
        'storage_driver': dest_client_config,
        'compression': {
            'enabled': False
        }
    }

    migration.initiate_migration(src_config, dest_config, remove_on_source=True, do_lock=False)
    self.flush_data()
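# Hedged example, not from the original suite: how a concrete migration test could
# exercise run_test() with a db source and a localfs destination. The driver names
# mirror configs used elsewhere in this module, but the archive_data_dir path is an
# assumption.
def test_db_to_fs_migration_example(self):
    src_client_config = {
        'name': 'db',
        'config': {}
    }
    dest_client_config = {
        'name': 'localfs',
        'config': {
            'archive_data_dir': '/tmp/archive_test/migration_fs_driver'
        }
    }

    self.run_test(src_client_config, dest_client_config)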
def test_s3(self):
    config = {
        'archive': {
            'compression': {
                'enabled': False
            },
            'storage_driver': {
                'name': 's3',
                'config': {
                    'access_key': test_key,
                    'secret_key': test_secret,
                    'url': 'http://localhost:9000',
                    'region': None,
                    'bucket': 'testarchivebucket'
                }
            }
        }
    }

    archive.initialize(config, force=True)
    self.run_test()
def test_swift(self):
    config = {
        'archive': {
            'compression': {
                'enabled': True
            },
            'storage_driver': {
                'name': 'swift',
                'config': {
                    'user': '******',
                    'key': 'testing',
                    'auth': 'http://localhost:8080/auth/v1.0',
                    'container': 'testarchive'
                }
            }
        }
    }

    archive.initialize(config, force=True)
    self.run_test()
def test_swift_bad_container(self):
    config = {
        'archive': {
            'compression': {
                'enabled': True
            },
            'storage_driver': {
                'name': 'swift',
                'config': {
                    'user': '******',
                    'key': 'testing123',
                    'auth': 'http://localhost:8080/auth/v1.0',
                    'container': 'testarchive_does_not_exist'
                }
            }
        }
    }

    with self.assertRaises(DriverConfigurationError) as err:
        archive.initialize(config, force=True)

    print('Got expected error: {}'.format(err.exception.message))
def check(configfile):
    """
    Test the configuration in the expected anchore-engine config location, or override that and use the
    configuration file provided as an option.

    To test, the system will read and write a very small data document to the driver and then delete it on completion.

    :param configfile:
    :return:
    """
    logger.info('Using config file {}'.format(configfile))
    sys_config = load_config(configfile=configfile)

    if sys_config:
        service_config = sys_config['services']['catalog']
    else:
        service_config = None

    if not service_config:
        logger.error('No configuration file or content available. Cannot test archive driver configuration')
        utils.doexit(2)

    archive.initialize(service_config)

    test_user_id = 'test'
    test_bucket = 'anchorecliconfigtest'
    test_archive_id = 'cliconfigtest'
    test_data = 'clitesting at {}'.format(datetime.datetime.utcnow().isoformat())

    logger.info('Checking existence of test document with user_id = {}, bucket = {} and archive_id = {}'.format(test_user_id, test_bucket, test_archive_id))
    if archive.exists(test_user_id, test_bucket, test_archive_id):
        test_archive_id = 'cliconfigtest2'
        if archive.exists(test_user_id, test_bucket, test_archive_id):
            logger.error('Found existing records for archive doc to test, aborting test to avoid overwriting any existing data')
            utils.doexit(1)

    logger.info('Creating test document with user_id = {}, bucket = {} and archive_id = {}'.format(test_user_id, test_bucket, test_archive_id))
    result = archive.put(test_user_id, test_bucket, test_archive_id, data=test_data)
    if not result:
        logger.warn('Warning: Got empty response from archive PUT operation: {}'.format(result))

    logger.info('Checking document fetch')
    loaded = str(archive.get(test_user_id, test_bucket, test_archive_id), 'utf-8')
    if not loaded:
        logger.error('Failed retrieving the written document. Got: {}'.format(loaded))
        utils.doexit(5)

    if str(loaded) != test_data:
        logger.error('Failed retrieving the written document. Got something other than expected. Expected: "{}" Got: "{}"'.format(test_data, loaded))
        utils.doexit(5)

    logger.info('Removing test object')
    archive.delete(test_user_id, test_bucket, test_archive_id)

    if archive.exists(test_user_id, test_bucket, test_archive_id):
        logger.error('Found archive object after it should have been removed')
        utils.doexit(5)

    logger.info('Archive config check completed successfully')
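# Hedged sketch of how check() might be exposed as a click command. This wiring, the
# command name, and the default config path are assumptions for illustration, not the
# actual anchore-manager CLI registration.
import click

@click.command(name='object-storage-check')
@click.option('--configfile', default='/config/config.yaml',
              help='Path to the anchore-engine configuration file (assumed default)')
def object_storage_check(configfile):
    # Delegate to the driver check above, which reads/writes/deletes a small test document
    check(configfile)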