def test_s3_bad_bucket(s3_bucket, anchore_db):
    config = {
        "archive": {
            "compression": {"enabled": False},
            "storage_driver": {
                "name": "s3",
                "config": {
                    "access_key": test_s3_key,
                    "secret_key": test_s3_secret_key,
                    "url": test_s3_url,
                    "region": None,
                    "bucket": "testarchivebucket_does_not_exist",
                },
            },
        }
    }
    with pytest.raises(DriverConfigurationError) as err:
        object_store.initialize(
            config,
            check_db=False,
            manager_id=DEFAULT_OBJECT_STORE_MANAGER_ID,
            config_keys=[DEFAULT_OBJECT_STORE_MANAGER_ID, ALT_OBJECT_STORE_CONFIG_KEY],
            allow_legacy_fallback=False,
            force=True,
        )
    logger.info("Got expected error: {}".format(err.type))


def test_s3_create_bucket(s3_bucket, anchore_db):
    config = {
        "archive": {
            "compression": {"enabled": False},
            "storage_driver": {
                "name": "s3",
                "config": {
                    "create_bucket": True,
                    "access_key": test_s3_key,
                    "secret_key": test_s3_secret_key,
                    "url": test_s3_url,
                    "region": test_s3_region,
                    "bucket": "testarchivebucket2",
                },
            },
        }
    }
    object_store.initialize(
        config,
        check_db=False,
        manager_id=DEFAULT_OBJECT_STORE_MANAGER_ID,
        config_keys=[DEFAULT_OBJECT_STORE_MANAGER_ID, ALT_OBJECT_STORE_CONFIG_KEY],
        allow_legacy_fallback=False,
        force=True,
    )
    run_test()


def test_swift_bad_container(swift_container, anchore_db):
    config = {
        "archive": {
            "compression": {"enabled": True},
            "storage_driver": {
                "name": "swift",
                "config": {
                    "user": test_swift_user,
                    "key": test_swift_key,
                    "auth": test_swift_auth_url,
                    "container": "testarchive_does_not_exist",
                },
            },
        }
    }
    with pytest.raises(DriverConfigurationError) as err:
        object_store.initialize(
            config,
            check_db=False,
            manager_id=DEFAULT_OBJECT_STORE_MANAGER_ID,
            config_keys=[DEFAULT_OBJECT_STORE_MANAGER_ID, ALT_OBJECT_STORE_CONFIG_KEY],
            allow_legacy_fallback=False,
            force=True,
        )
    logger.info("Got expected error: {}".format(err.type))


def test_legacy_db(anchore_db):
    # NOTE: legacy db driver does not support compression since it uses string
    # type instead of binary for content storage
    config = {
        "archive": {
            "compression": {"enabled": False},
            "storage_driver": {"name": "db", "config": {}},
        }
    }
    object_store.initialize(
        config,
        check_db=False,
        manager_id=DEFAULT_OBJECT_STORE_MANAGER_ID,
        config_keys=[DEFAULT_OBJECT_STORE_MANAGER_ID, ALT_OBJECT_STORE_CONFIG_KEY],
        allow_legacy_fallback=False,
        force=True,
    )
    run_test()


def test_swift_bad_creds(swift_container, anchore_db):
    config = {
        "archive": {
            "compression": {"enabled": True},
            "storage_driver": {
                "name": "swift",
                "config": {
                    "user": test_swift_user,
                    "key": "badkey",
                    "auth": test_swift_auth_url,
                    "container": test_swift_container,
                },
            },
        }
    }
    with pytest.raises(BadCredentialsError) as err:
        object_store.initialize(
            config,
            check_db=False,
            manager_id=DEFAULT_OBJECT_STORE_MANAGER_ID,
            config_keys=[DEFAULT_OBJECT_STORE_MANAGER_ID, ALT_OBJECT_STORE_CONFIG_KEY],
            allow_legacy_fallback=False,
            force=True,
        )
        # only reached if initialize() returns without raising
        pytest.fail("Should have raised bad creds exception on init")
    logger.info("Got expected error: {}".format(err.type))


def test_swift_create_container(swift_container, anchore_db):
    config = {
        "archive": {
            "compression": {"enabled": True},
            "storage_driver": {
                "name": "swift",
                "config": {
                    "user": test_swift_user,
                    "key": test_swift_key,
                    "auth": test_swift_auth_url,
                    "container": "testarchive2",
                    "create_container": True,
                },
            },
        }
    }
    object_store.initialize(
        config,
        check_db=False,
        manager_id=DEFAULT_OBJECT_STORE_MANAGER_ID,
        config_keys=[DEFAULT_OBJECT_STORE_MANAGER_ID, ALT_OBJECT_STORE_CONFIG_KEY],
        allow_legacy_fallback=False,
        force=True,
    )
    run_test()


def test_fs(anchore_db):
    config = {
        "archive": {
            "compression": {"enabled": True},
            "storage_driver": {
                "name": "localfs",
                "config": {"archive_data_dir": "/tmp/archive_test/fs_driver"},
            },
        }
    }
    object_store.initialize(
        config,
        check_db=False,
        manager_id=DEFAULT_OBJECT_STORE_MANAGER_ID,
        config_keys=[DEFAULT_OBJECT_STORE_MANAGER_ID, ALT_OBJECT_STORE_CONFIG_KEY],
        allow_legacy_fallback=False,
        force=True,
    )
    run_test()


def test_s3_auto(s3_bucket, anchore_db):
    os.environ["AWS_ACCESS_KEY"] = test_s3_key
    os.environ["AWS_SECRET_ACCESS_KEY"] = test_s3_secret_key
    config = {
        "archive": {
            "compression": {"enabled": False},
            "storage_driver": {
                "name": "s3",
                "config": {
                    "iamauto": True,
                    "bucket": "testarchivebucket_does_not_exist",
                },
            },
        }
    }
    with pytest.raises(DriverConfigurationError) as err:
        object_store.initialize(
            config,
            check_db=False,
            manager_id=DEFAULT_OBJECT_STORE_MANAGER_ID,
            config_keys=[DEFAULT_OBJECT_STORE_MANAGER_ID, ALT_OBJECT_STORE_CONFIG_KEY],
            allow_legacy_fallback=False,
            force=True,
        )
    logger.info("Got expected error: {}".format(err.type))


def archive_data_upgrade_005_006():
    """
    Upgrade the document archive data schema and move the data appropriately.

    Assumes both tables are in place (archive_document, archive_document_reference, object_storage)

    :return:
    """
    from anchore_engine.db import (
        LegacyArchiveDocument,
        ObjectStorageMetadata,
        session_scope,
    )
    from anchore_engine.subsys import object_store
    import anchore_engine.subsys.object_store.manager
    from anchore_engine.subsys.object_store.config import (
        ALT_OBJECT_STORE_CONFIG_KEY,
        DEFAULT_OBJECT_STORE_MANAGER_ID,
    )
    from anchore_engine.configuration import localconfig

    config = localconfig.get_config()
    object_store.initialize(
        config.get("services", {}).get("catalog", {}),
        manager_id=DEFAULT_OBJECT_STORE_MANAGER_ID,
        config_keys=(DEFAULT_OBJECT_STORE_MANAGER_ID, ALT_OBJECT_STORE_CONFIG_KEY),
        allow_legacy_fallback=True,
    )
    client = anchore_engine.subsys.object_store.manager.get_manager().primary_client

    session_counter = 0
    max_pending_session_size = 10000

    with session_scope() as db_session:
        for doc in db_session.query(
            LegacyArchiveDocument.userId,
            LegacyArchiveDocument.bucket,
            LegacyArchiveDocument.archiveId,
            LegacyArchiveDocument.documentName,
            LegacyArchiveDocument.created_at,
            LegacyArchiveDocument.last_updated,
            LegacyArchiveDocument.record_state_key,
            LegacyArchiveDocument.record_state_val,
        ):
            meta = ObjectStorageMetadata(
                userId=doc[0],
                bucket=doc[1],
                archiveId=doc[2],
                documentName=doc[3],
                is_compressed=False,
                document_metadata=None,
                content_url=client.uri_for(userId=doc[0], bucket=doc[1], key=doc[2]),
                created_at=doc[4],
                last_updated=doc[5],
                record_state_key=doc[6],
                record_state_val=doc[7],
            )
            db_session.add(meta)

            # flush periodically to bound the pending session size
            session_counter += 1
            if session_counter >= max_pending_session_size:
                db_session.flush()
                session_counter = 0


def test_db(anchore_db):
    config = {
        "archive": {
            "compression": {"enabled": True},
            "storage_driver": {"name": "db2", "config": {}},
        }
    }
    object_store.initialize(
        config,
        check_db=False,
        manager_id=DEFAULT_OBJECT_STORE_MANAGER_ID,
        config_keys=[DEFAULT_OBJECT_STORE_MANAGER_ID, ALT_OBJECT_STORE_CONFIG_KEY],
        allow_legacy_fallback=False,
        force=True,
    )
    run_test()


def run_test(src_client_config, dest_client_config):
    """
    Common test path for all configs to test against

    :return:
    """
    logger.info(
        "Running migration test from {} to {}".format(
            src_client_config["name"], dest_client_config["name"]
        )
    )

    # config = {'services': {'catalog': {'archive': {'compression': {'enabled': False}, 'storage_driver': src_client_config}}}}
    config = {"archive": src_client_config}
    object_store.initialize(
        config,
        check_db=False,
        manager_id=DEFAULT_OBJECT_STORE_MANAGER_ID,
        config_keys=[DEFAULT_OBJECT_STORE_MANAGER_ID, ALT_OBJECT_STORE_CONFIG_KEY],
        allow_legacy_fallback=False,
        force=True,
    )
    add_data()

    src_config = {
        "storage_driver": src_client_config,
        "compression": {"enabled": False},
    }
    dest_config = {
        "storage_driver": dest_client_config,
        "compression": {"enabled": False},
    }
    migration.initiate_migration(
        src_config, dest_config, remove_on_source=True, do_lock=False
    )
    flush_data()


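# Example pairing for run_test (hypothetical driver configs; the dicts mirror
# the storage_driver sections used elsewhere in this module):
#
#   run_test(
#       {"name": "db", "config": {}},
#       {"name": "localfs", "config": {"archive_data_dir": "/tmp/archive_test/fs_driver"}},
#   )
#
# This would migrate all archived documents from the db driver into the
# localfs archive, deleting each object from the source as it is moved
# (remove_on_source=True).

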
def check(configfile, analysis_archive):
    """
    Test the configuration in the expected anchore-engine config location or
    override that and use the configuration file provided as an option.

    To test, the system will read and write a very small data document to the
    driver and then delete it on completion.
    """
    db_conf = db_context()
    db_preflight(db_conf["params"], db_conf["retries"])

    logger.info("Using config file {}".format(configfile))
    sys_config = load_config(configfile=configfile)

    if sys_config:
        service_config = sys_config["services"]["catalog"]
    else:
        service_config = None

    if not service_config:
        logger.info(
            "No configuration file or content available. Cannot test archive driver configuration"
        )
        fail_exit()

    if analysis_archive:
        try:
            object_store.initialize(
                service_config,
                manager_id=ANALYSIS_ARCHIVE_MANAGER_ID,
                config_keys=[ANALYSIS_ARCHIVE_MANAGER_ID],
            )
        except Exception:
            logger.error(
                'No "analysis_archive" configuration section found in the configuration. To check a config that uses the default backend for analysis archive data, use the regular object storage check'
            )
            fail_exit()

        mgr = object_store.get_manager(ANALYSIS_ARCHIVE_MANAGER_ID)
    else:
        object_store.initialize(
            service_config,
            manager_id=DEFAULT_OBJECT_STORE_MANAGER_ID,
            config_keys=[DEFAULT_OBJECT_STORE_MANAGER_ID, ALT_OBJECT_STORE_CONFIG_KEY],
        )
        mgr = object_store.get_manager()

    test_user_id = "test"
    test_bucket = "anchorecliconfigtest"
    test_archive_id = "cliconfigtest"
    test_data = "clitesting at {}".format(datetime.datetime.utcnow().isoformat())

    logger.info(
        "Checking existence of test document with user_id = {}, bucket = {} and archive_id = {}".format(
            test_user_id, test_bucket, test_archive_id
        )
    )
    if mgr.exists(test_user_id, test_bucket, test_archive_id):
        test_archive_id = "cliconfigtest2"
        if mgr.exists(test_user_id, test_bucket, test_archive_id):
            logger.error(
                "Found existing records for archive doc to test, aborting test to avoid overwriting any existing data"
            )
            doexit(1)

    logger.info(
        "Creating test document with user_id = {}, bucket = {} and archive_id = {}".format(
            test_user_id, test_bucket, test_archive_id
        )
    )
    result = mgr.put(test_user_id, test_bucket, test_archive_id, data=test_data)
    if not result:
        logger.warn("Got empty response from archive PUT operation: {}".format(result))

    logger.info("Checking document fetch")
    loaded = str(mgr.get(test_user_id, test_bucket, test_archive_id), "utf-8")
    if not loaded:
        logger.error("Failed retrieving the written document. Got: {}".format(loaded))
        doexit(ExitCode.obj_store_failed)

    if str(loaded) != test_data:
        logger.error(
            'Failed retrieving the written document. Got something other than expected. Expected: "{}" Got: "{}"'.format(
                test_data, loaded
            )
        )
        doexit(ExitCode.obj_store_failed)

    logger.info("Removing test object")
    mgr.delete(test_user_id, test_bucket, test_archive_id)

    if mgr.exists(test_user_id, test_bucket, test_archive_id):
        logger.error("Found archive object after it should have been removed")
        doexit(ExitCode.obj_store_failed)

    logger.info("Archive config check completed successfully")


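# The click decorators that bind check() to the anchore-manager CLI are not
# shown in this excerpt. A hypothetical invocation, with the command path and
# flag name assumed from the function signature above, might look like:
#
#   anchore-manager objectstorage check /config/config.yaml
#   anchore-manager objectstorage check --analysis-archive /config/config.yaml

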
def test_s3_bad_creds(s3_bucket, anchore_db):
    config = {
        "archive": {
            "compression": {"enabled": False},
            "storage_driver": {
                "name": "s3",
                "config": {
                    "access_key": test_s3_key,
                    "secret_key": "notrealkey",
                    "url": test_s3_url,
                    "region": test_s3_region,
                    "bucket": test_s3_bucket,
                },
            },
        }
    }
    with pytest.raises(BadCredentialsError) as err:
        object_store.initialize(
            config,
            check_db=False,
            manager_id=DEFAULT_OBJECT_STORE_MANAGER_ID,
            config_keys=[DEFAULT_OBJECT_STORE_MANAGER_ID, ALT_OBJECT_STORE_CONFIG_KEY],
            allow_legacy_fallback=False,
            force=True,
        )
        # only reached if initialize() returns without raising
        pytest.fail("Should have gotten a bad creds error")
    logger.info("Got expected error: {}".format(err.type))