def clean(self):
    """Scrub repo-related collections and task status entries between tests."""
    super(RepoManagerTests, self).clean()
    # Drop every repo-related collection this suite touches.
    for model in (Repo, RepoImporter, RepoDistributor):
        model.get_collection().remove()
    dispatch.TaskStatus.objects().delete()
def clean(self):
    """Reset the mock distributor and purge repo/distributor collections."""
    super(RepoDistributorManagerTests, self).clean()
    mock_plugins.MOCK_DISTRIBUTOR.reset_mock()
    for model in (Repo, RepoDistributor):
        model.get_collection().remove()
def clean(self):
    """Clear type definitions, plugin mocks, and repo/association collections."""
    super(RepoSyncConduitTests, self).clean()
    types_database.clean()
    mock_plugins.reset()
    for model in (RepoContentUnit, Repo):
        model.get_collection().remove()
def test_import(self):
    """Publish a repo via the nodes distributor, wipe the DB, then sync it back."""
    # -- setup: publish the parent repository --------------------------------
    self.populate()
    pulp_conf.set('server', 'storage_dir', self.parentfs)
    distributor = NodesHttpDistributor()
    repo = Repository(self.REPO_ID)
    publish_config = {
        'protocol': 'file',
        'http': {'alias': self.alias},
        'https': {'alias': self.alias},
        'file': {'alias': self.alias},
    }
    conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    distributor.publish_repo(repo, conduit, publish_config)
    # Wipe the database so the import starts from a clean slate.
    for model in (Repo, RepoDistributor, RepoContentUnit):
        model.get_collection().remove()
    unit_db.clean()
    # -- test: sync from the just-published manifest -------------------------
    importer = NodesHttpImporter()
    publisher = distributor.publisher(repo, publish_config)
    manifest_url = 'file://' + publisher.manifest_path()
    sync_config = dict(manifest_url=manifest_url, strategy=constants.MIRROR_STRATEGY)
    conduit = RepoSyncConduit(
        self.REPO_ID,
        constants.HTTP_IMPORTER,
        RepoContentUnit.OWNER_TYPE_IMPORTER,
        constants.HTTP_IMPORTER)
    importer.sync_repo(repo, conduit, sync_config)
    # -- verify: every published unit came back ------------------------------
    units = conduit.get_units()
    self.assertEquals(len(units), self.NUM_UNITS)
def tearDown(self):
    """Drop consumer/repo/bind collections and reset the plugin mocks."""
    base.PulpWebserviceTests.tearDown(self)
    for model in (Consumer, Repo, RepoDistributor, Bind):
        model.get_collection().remove()
    mock_plugins.reset()
def tearDown(self):
    """Remove bind-related collections and restore the mocked plugins."""
    super(BindManagerTests, self).tearDown()
    for model in (Consumer, Repo, RepoDistributor, Bind):
        model.get_collection().remove()
    mock_plugins.reset()
def clean(self):
    """Reset plugin mocks and purge repo/distributor collections."""
    super(RepoPublishConduitTests, self).clean()
    mock_plugins.reset()
    for model in (Repo, RepoDistributor):
        model.get_collection().remove()
def clean(self):
    """Purge every bind/repo collection and the unit database."""
    for model in (Bind, Repo, RepoDistributor, RepoImporter, RepoContentUnit):
        model.get_collection().remove()
    unit_db.clean()
def setUp(self):
    """Load the 0004 migration module and seed source/dest repos with importers."""
    super(Migration0004Tests, self).setUp()
    # Special way to import modules that start with a number
    self.migration = _import_all_the_way(
        'pulp_rpm.plugins.migrations.0004_pkg_group_category_repoid')
    factory.initialize()
    types_db.update_database([TYPE_DEF_GROUP, TYPE_DEF_CATEGORY])
    # Create the repositories necessary for the tests
    self.source_repo_id = 'source-repo'  # where units were copied from with the bad code
    self.dest_repo_id = 'dest-repo'  # where bad units were copied to
    for repo_id in (self.source_repo_id, self.dest_repo_id):
        Repo.get_collection().insert(Repo(repo_id, ''), safe=True)
        importer = RepoImporter(repo_id, 'yum_importer', 'yum_importer', {})
        RepoImporter.get_collection().insert(importer, safe=True)
def tearDown(self):
    """Remove consumer/repo/bind state and restore mocked plugins."""
    PulpItineraryTests.tearDown(self)
    for model in (Consumer, Repo, RepoDistributor, Bind):
        model.get_collection().remove()
    mock_plugins.reset()
def tearDown(self):
    """Remove consumer/repo/bind documents (safe writes) and reset plugin mocks."""
    super(ConsumerTest, self).tearDown()
    for model in (Consumer, Repo, RepoDistributor, Bind):
        model.get_collection().remove(safe=True)
    mock_plugins.reset()
def setUp(self):
    """Start from empty collections and install the mock plugin set."""
    base.PulpWebserviceTests.setUp(self)
    for model in (Consumer, Repo, RepoDistributor, Bind):
        model.get_collection().remove()
    plugin_api._create_manager()
    mock_plugins.install()
def tearDown(self):
    # NOTE(review): super(self.__class__, self) resolves to the instance's
    # concrete class, which recurses infinitely if this class is ever
    # subclassed -- consider naming the class explicitly.
    super(self.__class__, self).tearDown()
    # Empty every collection this suite writes to, then restore plugin mocks.
    Consumer.get_collection().remove()
    ConsumerGroup.get_collection().remove()
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    Bind.get_collection().remove()
    mock_plugins.reset()
def setUp(self):
    """Reset bind-related collections and install mock plugins."""
    super(BindManagerTests, self).setUp()
    for model in (Consumer, Repo, RepoDistributor, Bind):
        model.get_collection().remove()
    plugin_api._create_manager()
    mock_plugins.install()
def tearDown(self):
    """Reset mocks/factories and purge sync-related collections."""
    super(TestDoSync, self).tearDown()
    mock_plugins.reset()
    manager_factory.reset()
    for model in (Repo, RepoImporter, RepoSyncResult):
        model.get_collection().remove()
    MockRepoPublishManager.reset()
def clean(self):
    """Purge sync collections and reset the mock publish manager."""
    super(RepoSyncManagerTests, self).clean()
    for model in (Repo, RepoImporter, RepoSyncResult):
        model.get_collection().remove()
    # Reset the state of the mock's tracker variables
    MockRepoPublishManager.reset()
def tearDown(self):
    """Drop consumer/repo collections, clean the type DB, finalize plugins."""
    PulpRPMTests.tearDown(self)
    for model in (Consumer, Repo, RepoContentUnit, RepoDistributor):
        model.get_collection().remove()
    database.clean()
    plugins.finalize()
def setUp(self):
    """Empty the repo collections and register the nodes HTTP importer."""
    super(QueryTests, self).setUp()
    for model in (Repo, RepoContentUnit):
        model.get_collection().remove()
    unit_db.clean()
    self.define_plugins()
    plugin_api._create_manager()
    plugin_api._MANAGER.importers.add_plugin(
        constants.HTTP_IMPORTER, NodesHttpImporter, {})
def tearDown(self):
    """Delete any sample data added for the test."""
    super(Migration0004Tests, self).tearDown()
    types_db.clean()
    for model in (RepoContentUnit, RepoImporter, Repo):
        model.get_collection().remove()
def setUp(self):
    # NOTE(review): super(self.__class__, self) resolves to the instance's
    # concrete class, which recurses infinitely if this class is ever
    # subclassed -- consider naming the class explicitly.
    super(self.__class__, self).setUp()
    # Start each test from empty collections, then install the mock plugins.
    Consumer.get_collection().remove()
    ConsumerGroup.get_collection().remove()
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    Bind.get_collection().remove()
    plugin_api._create_manager()
    mock_plugins.install()
def setUp(self):
    """Clear collections, then install mock plugins and the mock agent."""
    PulpItineraryTests.setUp(self)
    for model in (Consumer, Repo, RepoDistributor, Bind):
        model.get_collection().remove()
    plugin_api._create_manager()
    mock_plugins.install()
    mock_agent.install()
def clean(self):
    """Purge dependency-test collections and reset the mock importer."""
    super(DependencyManagerTests, self).clean()
    database.clean()
    for model in (Repo, RepoImporter, RepoContentUnit):
        model.get_collection().remove()
    mock_plugins.MOCK_IMPORTER.resolve_dependencies.return_value = None
def tearDown(self):
    """Remove profiler-conduit fixtures and reset the type DB and factory."""
    super(BaseProfilerConduitTests, self).tearDown()
    for model in (Consumer, Repo, RepoDistributor, Bind,
                  RepoContentUnit, UnitProfile):
        model.get_collection().remove()
    typedb.clean()
    factory.reset()
def setUp(self):
    """Empty repo collections, stub unit-type lookups, register the importer."""
    super(QueryTests, self).setUp()
    for model in (Repo, RepoContentUnit):
        model.get_collection().remove()
    unit_db.clean()
    # Each pass rebinds the same two module attributes, so only the mocks
    # created on the final type_id remain in effect after the loop.
    for type_id in ALL_TYPES:
        unit_db.type_definition = Mock(
            return_value=dict(id=type_id, unit_key=UNIT_METADATA))
        unit_db.type_units_unit_key = Mock(return_value=['A', 'B', 'C', 'N'])
    plugin_api._create_manager()
    plugin_api._MANAGER.importers.add_plugin(
        constants.HTTP_IMPORTER, NodesHttpImporter, {})
def clean(self, units_only=False, plugins=False):
    """Purge unit associations; optionally also repos, binds, and plugin registry."""
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    if units_only:
        return
    for model in (Bind, Repo, RepoDistributor, RepoImporter):
        model.get_collection().remove()
    if plugins:
        plugin_api._MANAGER.distributors.plugins = {}
def tearDown(self):
    """Remove temp trees and purge every node-related collection."""
    WebTest.tearDown(self)
    shutil.rmtree(self.parentfs)
    shutil.rmtree(self.childfs)
    for model in (Consumer, Bind, Repo, RepoDistributor,
                  RepoImporter, RepoContentUnit):
        model.get_collection().remove()
    unit_db.clean()
def setUp(self):
    """Reset collections, register the test unit types, install mock plugins."""
    super(BaseProfilerConduitTests, self).setUp()
    for model in (Consumer, Repo, RepoDistributor, Bind,
                  RepoContentUnit, UnitProfile):
        model.get_collection().remove()
    plugin_api._create_manager()
    typedb.update_database([self.TYPE_1_DEF, self.TYPE_2_DEF])
    mock_plugins.install()
def clean(self, just_units=False, purge_plugins=False):
    """Purge unit data; optionally also repo collections and plugin registries."""
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    if just_units:
        return
    for model in (Bind, Repo, RepoDistributor, RepoImporter):
        model.get_collection().remove()
    if purge_plugins:
        plugin_api._MANAGER.importers.plugins = {}
        plugin_api._MANAGER.distributors.plugins = {}
def test_migrate_duplicates_doesnt_delete_from_source_repo(self):
    """
    This tests the correct behavior when we try to change the repo_id on an
    object, and end up causing a duplicate error due to our uniqueness
    constraint. It also makes sure the units are not deleted from the source
    repository if they are in the source repository.
    """
    # Two units share the same unit id ('group') but belong to different
    # repos; the migration must not collapse or delete either of them.
    source_group_id = add_unit('group', self.source_repo_id, ids.TYPE_ID_PKG_GROUP)
    dest_group_id = add_unit('group', self.dest_repo_id, ids.TYPE_ID_PKG_GROUP)
    # The source unit is associated with both repos; the dest unit only with
    # the destination repo.
    associate_unit(source_group_id, self.source_repo_id, ids.TYPE_ID_PKG_GROUP)
    associate_unit(source_group_id, self.dest_repo_id, ids.TYPE_ID_PKG_GROUP)
    associate_unit(dest_group_id, self.dest_repo_id, ids.TYPE_ID_PKG_GROUP)

    # Migrate should not cause a DuplicateKeyError
    self.migration.migrate()

    # Both groups must survive, one per repo.
    group_collection = types_db.type_units_collection(ids.TYPE_ID_PKG_GROUP)
    self.assertEqual(len(list(group_collection.find())), 2)
    for repo_id in (self.dest_repo_id, self.source_repo_id):
        self.assertEqual(
            group_collection.find({'id': 'group', 'repo_id': repo_id}).count(), 1)

    # Each repo should hold exactly one association, pointing at its own unit.
    query_manager = factory.repo_unit_association_query_manager()
    expected_pairs = (
        (self.dest_repo_id, dest_group_id),
        (self.source_repo_id, source_group_id),
    )
    for repo_id, unit_id in expected_pairs:
        associations = query_manager.get_units(repo_id)
        self.assertEqual(len(associations), 1)
        self.assertEqual(associations[0]['unit_type_id'], ids.TYPE_ID_PKG_GROUP)
        self.assertEqual(associations[0]['unit_id'], unit_id)

    # Verify the repo counts
    for repo_id in ('source-repo', 'dest-repo'):
        self.assertEqual(
            Repo.get_collection().find({'id': repo_id})[0]['content_unit_counts'],
            {'package_group': 1})
def test_sync_with_sync_config_override(self):
    """
    Tests a sync when passing in an individual config of override options.
    """
    # Setup: a repo with a configured mock importer.
    importer_config = {'thor': 'thor'}
    self.repo_manager.create_repo('repo-1')
    self.importer_manager.set_importer('repo-1', 'mock-importer', importer_config)

    # Test: sync with a per-call override config.
    sync_config_override = {'clint': 'hawkeye'}
    self.sync_manager.sync('repo-1', sync_config_override=sync_config_override)

    # Verify -- database: the sync timestamp must have been recorded.
    repo = Repo.get_collection().find_one({'id': 'repo-1'})
    importer_doc = RepoImporter.get_collection().find_one(
        {'repo_id': 'repo-1', 'id': 'mock-importer'})
    self.assertTrue(importer_doc['last_sync'] is not None)
    self.assertTrue(assert_last_sync_time(importer_doc['last_sync']))

    # Verify -- the positional arguments the importer's sync_repo received.
    call_args = mock_plugins.MOCK_IMPORTER.sync_repo.call_args[0]
    self.assertEqual(repo['id'], call_args[0].id)
    self.assertTrue(call_args[1] is not None)
    self.assertEqual({}, call_args[2].plugin_config)
    self.assertEqual(importer_config, call_args[2].repo_plugin_config)
    self.assertEqual(sync_config_override, call_args[2].override_config)
def test_import_unit_files_already_exist(self, *mocks):
    """Sync succeeds when every unit file is already present on the child."""
    # Setup: publish the parent repository.
    self.populate()
    pulp_conf.set('server', 'storage_dir', self.parentfs)
    distributor = NodesHttpDistributor()
    working_dir = os.path.join(self.childfs, 'working_dir')
    os.makedirs(working_dir)
    repo = Repository(self.REPO_ID, working_dir)
    config = self.dist_conf()
    conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    distributor.publish_repo(repo, conduit, config)
    # Wipe the database, then pre-seed the child with the published content
    # so every unit file already exists before the sync runs.
    for model in (Repo, RepoDistributor, RepoContentUnit):
        model.get_collection().remove()
    unit_db.clean()
    self.define_plugins()
    parent_content = os.path.join(self.parentfs, 'content')
    child_content = os.path.join(self.childfs, 'content')
    shutil.copytree(parent_content, child_content)

    # Test: sync the child from the published manifest.
    importer = NodesHttpImporter()
    publisher = distributor.publisher(repo, config)
    manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
    configuration = PluginCallConfiguration(
        {
            constants.MANIFEST_URL_KEYWORD: manifest_url,
            constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
        },
        {})
    conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER)
    pulp_conf.set('server', 'storage_dir', self.childfs)
    importer.sync_repo(repo, conduit, configuration)

    # Verify: all units imported and the nectar config was built from the
    # flattened call configuration.
    units = conduit.get_units()
    self.assertEquals(len(units), self.NUM_UNITS)
    mock_importer_config_to_nectar_config = mocks[0]
    mock_importer_config_to_nectar_config.assert_called_with(configuration.flatten())
def test_import_cached_manifest_units_invalid(self, *unused):
    """Sync still imports all units when the cached units file is corrupt."""
    # Setup: publish the parent repository.
    self.populate()
    pulp_conf.set('server', 'storage_dir', self.parentfs)
    distributor = NodesHttpDistributor()
    working_dir = os.path.join(self.childfs, 'working_dir')
    os.makedirs(working_dir)
    repo = Repository(self.REPO_ID, working_dir)
    configuration = self.dist_conf()
    conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    distributor.publish_repo(repo, conduit, configuration)
    # Wipe the database so the sync starts fresh.
    for model in (Repo, RepoDistributor, RepoContentUnit):
        model.get_collection().remove()
    unit_db.clean()
    self.define_plugins()
    # Cache a valid manifest alongside a deliberately corrupted units file
    # in the importer's working directory.
    publisher = distributor.publisher(repo, configuration)
    manifest_path = publisher.manifest_path()
    manifest = Manifest(manifest_path)
    manifest.read()
    shutil.copy(manifest_path, os.path.join(working_dir, MANIFEST_FILE_NAME))
    with open(os.path.join(working_dir, UNITS_FILE_NAME), 'w+') as fp:
        fp.write('invalid-units')

    # Test
    importer = NodesHttpImporter()
    manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
    configuration = PluginCallConfiguration(
        {
            constants.MANIFEST_URL_KEYWORD: manifest_url,
            constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
        },
        {})
    conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER)
    pulp_conf.set('server', 'storage_dir', self.childfs)
    importer.sync_repo(repo, conduit, configuration)

    # Verify
    units = conduit.get_units()
    self.assertEquals(len(units), self.NUM_UNITS)
def find_by_id_list(self, repo_id_list):
    """
    Returns serialized versions of all of the given repositories. Any
    IDs that do not refer to valid repositories are ignored and will not
    raise an error.

    @param repo_id_list: list of repo IDs to fetch
    @type  repo_id_list: list of str

    @return: list of serialized repositories
    @rtype:  list of dict
    """
    query = {'id': {'$in': repo_id_list}}
    return list(Repo.get_collection().find(query))
def test_create_i18n(self):
    """
    Verify that non-ASCII display_name and description values round-trip
    through repo creation.
    """
    # Setup
    # NOTE: fixed mojibake -- the literal had been double-encoded
    # ('BrasA-lia' rendered via Latin-1); the intended value is the city name.
    i18n_text = 'Brasília'

    # Test
    self.manager.create_repo('repo-i18n', display_name=i18n_text, description=i18n_text)

    # Verify the stored document holds the same unicode text we passed in.
    repo = Repo.get_collection().find_one({'id': 'repo-i18n'})
    self.assertTrue(repo is not None)
    self.assertEqual(encode_unicode(repo['display_name']), i18n_text)
    self.assertEqual(encode_unicode(repo['description']), i18n_text)
def test_create_and_configure_repo(self):
    """
    Tests the successful creation of a repo + plugins.
    """
    # Setup
    repo_id = 'full'
    display_name = 'Full'
    description = 'Full Test'
    notes = {'n': 'n'}
    importer_type_id = 'mock-importer'
    importer_repo_plugin_config = {'i': 'i'}
    distributors = [
        dict(distributor_type_id='mock-distributor', distributor_config={'d': 'd'},
             auto_publish=True, distributor_id=dist_id)
        for dist_id in ('dist1', 'dist2')
    ]

    # Test
    created = self.manager.create_and_configure_repo(
        repo_id, display_name, description, notes, importer_type_id,
        importer_repo_plugin_config, distributors)

    # Verify the returned and persisted repo documents.
    self.assertEqual(created['id'], repo_id)
    repo = Repo.get_collection().find_one({'id': repo_id})
    self.assertEqual(repo['id'], repo_id)
    self.assertEqual(repo['display_name'], display_name)
    self.assertEqual(repo['description'], description)
    self.assertEqual(repo['notes'], notes)

    # Verify the importer configuration was stored.
    importer = RepoImporter.get_collection().find_one({'repo_id': repo_id})
    self.assertEqual(importer['importer_type_id'], importer_type_id)
    self.assertEqual(importer['config'], importer_repo_plugin_config)

    # Verify each stored distributor matches its requested settings.
    for requested in distributors:
        stored = RepoDistributor.get_collection().find_one(
            {'id': requested['distributor_id']})
        self.assertEqual(stored['repo_id'], repo_id)
        self.assertEqual(stored['distributor_type_id'], requested['distributor_type_id'])
        self.assertEqual(stored['auto_publish'], requested['auto_publish'])
        self.assertEqual(stored['config'], requested['distributor_config'])
def delete_repo(self, repo_id): """ Deletes the given repository, optionally requesting the associated importer clean up any content in the repository. @param repo_id: identifies the repo being deleted @type repo_id: str @raise MissingResource: if the given repo does not exist @raise OperationFailed: if any part of the delete process fails; the exception will contain information on which sections failed """ # Validation found = Repo.get_collection().find_one({'id' : repo_id}) if found is None: raise MissingResource(repo_id) # With so much going on during a delete, it's possible that a few things # could go wrong while others are successful. We track lesser errors # that shouldn't abort the entire process until the end and then raise # an exception describing the incompleteness of the delete. The exception # arguments are captured as the second element in the tuple, but the user # will have to look at the server logs for more information. error_tuples = [] # tuple of failed step and exception arguments # Remove and scheduled activities scheduler = dispatch_factory.scheduler() importer_manager = manager_factory.repo_importer_manager() importers = importer_manager.get_importers(repo_id) if importers: for schedule_id in importer_manager.list_sync_schedules(repo_id): scheduler.remove(schedule_id) distributor_manager = manager_factory.repo_distributor_manager() for distributor in distributor_manager.get_distributors(repo_id): for schedule_id in distributor_manager.list_publish_schedules(repo_id, distributor['id']): scheduler.remove(schedule_id) # Inform the importer importer_coll = RepoImporter.get_collection() repo_importer = importer_coll.find_one({'repo_id' : repo_id}) if repo_importer is not None: try: importer_manager.remove_importer(repo_id) except Exception, e: _LOG.exception('Error received removing importer [%s] from repo [%s]' % (repo_importer['importer_type_id'], repo_id)) error_tuples.append( (_('Importer Delete Error'), e.args) )
def remove_distributor(repo_id, distributor_id):
    """
    Removes a distributor from a repository.

    @param repo_id: identifies the repo
    @type  repo_id: str

    @param distributor_id: identifies the distributor to delete
    @type  distributor_id: str

    @raise MissingResource: if repo_id doesn't correspond to a valid repo
    @raise MissingResource: if there is no distributor with the given ID
    """
    # Validation
    repo = Repo.get_collection().find_one({'id': repo_id})
    if repo is None:
        raise MissingResource(repository=repo_id)

    distributor_coll = RepoDistributor.get_collection()
    repo_distributor = distributor_coll.find_one(
        {'repo_id': repo_id, 'id': distributor_id})
    if repo_distributor is None:
        raise MissingResource(distributor=distributor_id)

    # Remove any publish schedules tied to this distributor.
    RepoPublishScheduleManager().delete_by_distributor_id(
        repo_id, repo_distributor['id'])

    # Give the distributor plugin a chance to clean up its own state.
    distributor_type_id = repo_distributor['distributor_type_id']
    distributor_instance, plugin_config = plugin_api.get_distributor_by_id(
        distributor_type_id)
    call_config = PluginCallConfiguration(plugin_config, repo_distributor['config'])
    transfer_repo = common_utils.to_transfer_repo(repo)
    transfer_repo.working_dir = common_utils.distributor_working_dir(
        distributor_type_id, repo_id)
    distributor_instance.distributor_removed(transfer_repo, call_config)

    # Finally, drop the distributor document itself.
    distributor_coll.remove({'_id': repo_distributor['_id']}, safe=True)
def validate_importer_config(repo_id, importer_type_id, importer_config):
    """
    Validate an importer configuration. This validates that the repository and
    importer type exist as these are both required to validate the
    configuration.

    :param repo_id: identifies the repo
    :type  repo_id: str
    :param importer_type_id: identifies the type of importer being added;
                             must correspond to an importer loaded at server
                             startup
    :type  importer_type_id: str
    :param importer_config: configuration values for the importer; may be None
    :type  importer_config: dict
    """
    repo = Repo.get_collection().find_one({'id': repo_id})
    if repo is None:
        raise MissingResource(repo_id)

    if not plugin_api.is_valid_importer(importer_type_id):
        raise PulpCodedValidationException(
            error_code=error_codes.PLP1008, importer_type_id=importer_type_id)

    importer_instance, plugin_config = plugin_api.get_importer_by_id(
        importer_type_id)

    # Convention is that a value of None means unset. Remove any keys that
    # are explicitly set to None so the plugin will default them.
    clean_config = None
    if importer_config is not None:
        clean_config = dict(
            (k, v) for k, v in importer_config.items() if v is not None)

    # Let the importer plugin verify the configuration
    call_config = PluginCallConfiguration(plugin_config, clean_config)
    transfer_repo = common_utils.to_transfer_repo(repo)
    result = importer_instance.validate_config(transfer_repo, call_config)

    # For backward compatibility with plugins that don't yet return the tuple
    if isinstance(result, bool):
        valid_config, message = result, None
    else:
        valid_config, message = result

    if not valid_config:
        raise PulpCodedValidationException(validation_errors=message)
def setUp(self):
    """Create temp parent/child trees, reset collections, register node plugins."""
    WebTest.setUp(self)
    self.parentfs = self.tmpdir('parent-')
    self.childfs = self.tmpdir('child-')
    self.alias = (self.parentfs, self.parentfs)
    for model in (Consumer, Bind, Repo, RepoDistributor,
                  RepoImporter, RepoContentUnit):
        model.get_collection().remove()
    unit_db.clean()
    self.define_plugins()
    plugin_api._create_manager()
    # Register the nodes importer/distributor/profiler plugins used in tests.
    imp_conf = dict(strategy=constants.MIRROR_STRATEGY)
    plugin_api._MANAGER.importers.add_plugin(
        constants.HTTP_IMPORTER, NodesHttpImporter, imp_conf)
    plugin_api._MANAGER.distributors.add_plugin(
        constants.HTTP_DISTRIBUTOR, NodesHttpDistributor, {})
    plugin_api._MANAGER.distributors.add_plugin(
        FAKE_DISTRIBUTOR, FakeDistributor, FAKE_DISTRIBUTOR_CONFIG)
    plugin_api._MANAGER.profilers.add_plugin(constants.PROFILER_ID, NodeProfiler, {})
def test_create_defaults(self):
    """
    Tests creating a repository with minimal information (ID) is successful.
    """
    # Test
    self.manager.create_repo('repo_1')

    # Verify: exactly one repo exists and it is the one we created.
    created = list(Repo.get_collection().find())
    self.assertEqual(1, len(created))
    self.assertEqual('repo_1', created[0]['id'])
    # Assert the display name is defaulted to the id
    self.assertEqual('repo_1', created[0]['display_name'])
def test_delete_repo(self):
    """
    Tests deleting a repo under normal circumstances.
    """
    # Setup (renamed local from 'id' to avoid shadowing the builtin)
    repo_id = 'doomed'
    self.manager.create_repo(repo_id)

    # Test
    self.manager.delete_repo(repo_id)

    # Verify: no document remains for the deleted repo.
    repos = list(Repo.get_collection().find({'id': repo_id}))
    self.assertEqual(0, len(repos))
def create_bind_payload(self, repo_id, distributor_id, binding_config): """ Requests the distributor plugin to generate the consumer bind payload. @param repo_id: identifies the repo being bound @type repo_id: str @param distributor_id: identifies the distributor @type distributor_id: str @param binding_config: configuration applicable only to the binding whose payload is being created; may be None @type binding_config: object or None @return: payload object to pass to the consumer @rtype: dict @raise MissingResource: if the repo or distributor do not exist @raise PulpExecutionException: if the distributor raises an error """ # Input Validation repo_distributor = self.get_distributor(repo_id, distributor_id) repo = Repo.get_collection().find_one({'id': repo_id}) distributor_type_id = repo_distributor['distributor_type_id'] distributor_instance, plugin_config = plugin_api.get_distributor_by_id( distributor_type_id) # Let the distributor plugin verify the configuration call_config = PluginCallConfiguration(plugin_config, repo_distributor['config']) transfer_repo = common_utils.to_transfer_repo(repo) transfer_repo.working_dir = common_utils.distributor_working_dir( distributor_type_id, repo_id) try: payload = distributor_instance.create_consumer_payload( transfer_repo, call_config, binding_config) return payload except Exception: msg = _( 'Exception raised from distributor [%(d)s] generating consumer payload' ) msg = msg % {'d': distributor_id} _logger.exception(msg) raise PulpExecutionException(), None, sys.exc_info()[2]
def test_create_and_configure_repo_bad_importer(self):
    """
    Tests cleanup is successful when the add importer step fails.
    """
    # Setup: force importer config validation to fail.
    mock_plugins.MOCK_IMPORTER.validate_config.return_value = False, ''

    # Test: creation must surface the validation failure.
    self.assertRaises(
        exceptions.PulpDataException, self.manager.create_and_configure_repo,
        'repo-1', importer_type_id='mock-importer')

    # Verify the repo was deleted
    self.assertTrue(Repo.get_collection().find_one({'id': 'repo-1'}) is None)

    # Cleanup
    mock_plugins.MOCK_IMPORTER.validate_config.return_value = True
def get_repo_scratchpad(self, repo_id):
    """
    Retrieves the contents of the given repository's scratchpad.

    :param repo_id: identifies the repo
    :type  repo_id: str

    :raise MissingResource: if there is no repo with repo_id
    """
    repo = Repo.get_collection().find_one({'id': repo_id})
    if repo is None:
        raise MissingResource(repo_id)
    # Return a shallow copy so callers don't hold the stored document itself.
    return dict(repo['scratchpad'])
def get_importers(self, repo_id):
    """
    Returns a list of all importers associated with the given repo.

    @return: list of key-value pairs describing the importers in use; empty
             list if the repo has no importers
    @rtype:  list of dict

    @raise MissingResource: if the given repo doesn't exist
    """
    if Repo.get_collection().find_one({'id': repo_id}) is None:
        raise MissingResource(repo_id)
    cursor = RepoImporter.get_collection().find({'repo_id': repo_id})
    return list(cursor)
def associate(group_id, criteria):
    """
    Associate a set of repos, that match the passed in criteria, to a repo
    group.

    @param group_id: unique id of the group to associate repos to
    @type  group_id: str
    @param criteria: Criteria instance representing the set of repos to
                     associate
    @type  criteria: L{pulp.server.db.model.criteria.Criteria}
    """
    group_collection = validate_existing_repo_group(group_id)
    # Resolve the criteria into the concrete set of repo ids to add.
    matched = Repo.get_collection().query(criteria)
    repo_ids = [repo['id'] for repo in matched]
    if not repo_ids:
        return
    group_collection.update(
        {'id': group_id},
        {'$addToSet': {'repo_ids': {'$each': repo_ids}}},
        safe=True)
def publish_history(self, repo_id, distributor_id, limit=None):
    """
    Returns publish history entries for the give repo, sorted from most
    recent to oldest. If there are no entries, an empty list is returned.

    @param repo_id: identifies the repo
    @type  repo_id: str
    @param distributor_id: identifies the distributor to retrieve history for
    @type  distributor_id: str
    @param limit: maximum number of results to return
    @type  limit: int

    @return: list of publish history result instances
    @rtype:  list of L{pulp.server.db.model.repository.RepoPublishResult}

    @raise MissingResource: if repo_id does not reference a valid repo
    """
    # Validation
    if Repo.get_collection().find_one({'id': repo_id}) is None:
        raise MissingResource(repo_id)
    dist = RepoDistributor.get_collection().find_one(
        {'repo_id': repo_id, 'id': distributor_id})
    if dist is None:
        raise MissingResource(distributor_id)

    if limit is None:
        limit = 10  # default here for each of REST API calls into here

    # Retrieve entries newest-first, capped at the requested limit.
    cursor = RepoPublishResult.get_collection().find(
        {'repo_id': repo_id, 'distributor_id': distributor_id})
    cursor.limit(limit)
    cursor.sort('completed', pymongo.DESCENDING)
    return list(cursor)
def test_with_db(self):
    """Migration replaces content_unit_count with per-type unit counts."""
    REPO_ID = 'repo123'
    repo_collection = Repo.get_collection()
    repo_collection.save({'id': REPO_ID, 'content_unit_count': 0})
    assoc_collection = RepoContentUnit.get_collection()
    # Two rpm associations for the same repo.
    for unit_id in ('unit1', 'unit2'):
        assoc_collection.insert(
            {'repo_id': REPO_ID, 'unit_type_id': 'rpm', 'unit_id': unit_id})

    self.module.migrate()

    repo = repo_collection.find({'id': REPO_ID})[0]
    # The old scalar field is dropped in favor of the per-type mapping.
    self.assertTrue('content_unit_count' not in repo)
    self.assertEqual(repo['content_unit_counts'], {'rpm': 2})

    # cleanup
    repo_collection.remove({'id': REPO_ID})
    assoc_collection.remove({'repo_id': REPO_ID})
def rebuild_content_unit_counts(repo_ids=None):
    """
    WARNING: This might take a long time, and it should not be used unless
    absolutely necessary. Not responsible for melted servers.

    This will iterate through the given repositories, which defaults to ALL
    repositories, and recalculate the content unit counts for each content
    type.

    This method is called from platform migration 0004, so consult that
    migration before changing this method.

    :param repo_ids: list of repository IDs. DEFAULTS TO ALL REPO IDs!!!
    :type  repo_ids: list
    """
    association_collection = RepoContentUnit.get_collection()
    repo_collection = Repo.get_collection()

    # default to all repos if none were specified
    if not repo_ids:
        repo_ids = [repo['id'] for repo in repo_collection.find(fields=['id'])]

    logger.info('regenerating content unit counts for %d repositories' % len(repo_ids))

    for repo_id in repo_ids:
        logger.debug('regenerating content unit count for repository "%s"' % repo_id)
        # Count the associations per distinct unit type in this repo.
        cursor = association_collection.find({'repo_id': repo_id})
        type_ids = cursor.distinct('unit_type_id')
        cursor.close()
        counts = dict(
            (type_id,
             association_collection.find(
                 {'repo_id': repo_id, 'unit_type_id': type_id}).count())
            for type_id in type_ids)
        repo_collection.update(
            {'id': repo_id}, {'$set': {'content_unit_counts': counts}}, safe=True)
def unassociate(group_id, criteria):
    """
    Unassociate a set of repos, that match the passed in criteria, from a repo
    group.

    @param group_id: unique id of the group to unassociate repos from
    @type  group_id: str
    @param criteria: Criteria instance representing the set of repos to
                     unassociate
    @type  criteria: L{pulp.server.db.model.criteria.Criteria}
    """
    group_collection = validate_existing_repo_group(group_id)
    # Resolve the criteria into the concrete set of repo ids to pull.
    matched = Repo.get_collection().query(criteria)
    repo_ids = [repo['id'] for repo in matched]
    if not repo_ids:
        return
    group_collection.update(
        {'id': group_id},
        # for some reason, pymongo 1.9 doesn't like this
        # {'$pull': {'repo_ids': {'$in': repo_ids}}},
        {'$pullAll': {'repo_ids': repo_ids}},
        safe=True)
def remove_importer(repo_id):
    """
    Removes an importer from a repository.

    :param repo_id: identifies the repo
    :type  repo_id: str

    :raise MissingResource: if the given repo does not exist
    :raise MissingResource: if the given repo does not have an importer
    """
    # Validation
    repo = Repo.get_collection().find_one({'id': repo_id})
    if repo is None:
        raise MissingResource(repo_id)

    importer_coll = RepoImporter.get_collection()
    repo_importer = importer_coll.find_one({'repo_id': repo_id})
    if repo_importer is None:
        raise MissingResource(repo_id)

    # remove schedules
    RepoSyncScheduleManager().delete_by_importer_id(repo_id, repo_importer['id'])

    # Give the importer plugin a chance to clean up after itself.
    importer_type_id = repo_importer['importer_type_id']
    importer_instance, plugin_config = plugin_api.get_importer_by_id(
        importer_type_id)
    call_config = PluginCallConfiguration(plugin_config, repo_importer['config'])
    transfer_repo = common_utils.to_transfer_repo(repo)
    transfer_repo.working_dir = common_utils.importer_working_dir(
        importer_type_id, repo_id)
    importer_instance.importer_removed(transfer_repo, call_config)

    # Update the database to reflect the removal
    importer_coll.remove({'repo_id': repo_id}, safe=True)
def test_create_and_configure_repo_bad_distributor(self):
    """
    Tests cleanup is successful when the add distributor step fails.
    """
    # force the distributor config validation to fail
    mock_plugins.MOCK_DISTRIBUTOR.validate_config.return_value = False, ''

    bad_distributor = {'distributor_type': 'mock-distributor',
                       'distributor_config': {},
                       'auto_publish': True,
                       'distributor_id': None}

    self.assertRaises(exceptions.PulpDataException,
                      self.manager.create_and_configure_repo, 'repo-1',
                      distributor_list=[bad_distributor])

    # the half-created repo must have been rolled back
    self.assertTrue(Repo.get_collection().find_one({'id' : 'repo-1'}) is None)

    # restore the mock for subsequent tests
    mock_plugins.MOCK_DISTRIBUTOR.validate_config.return_value = True
def _set_current_date_on_field(repo_id, field_name):
    """
    Stamp the named field on a repo document with the current UTC time.

    :param repo_id: identifies the repo
    :type  repo_id: str
    :param field_name: field to update
    :type  field_name: str
    """
    now = dateutils.now_utc_datetime_with_tzinfo()
    Repo.get_collection().update({'id': repo_id},
                                 {'$set': {field_name: now}},
                                 safe=True)
def GET(self):
    """
    Looks for query parameters 'importers' and 'distributors', and will add
    the corresponding fields to the each repository returned. Query
    parameter 'details' is equivalent to passing both 'importers' and
    'distributors'.
    """
    params = web.input()

    # scratchpads can be large and are never useful to API consumers here
    repos = list(Repo.get_collection().find(projection={'scratchpad': 0}))

    # 'details' is shorthand for both flags
    if params.get('details', False):
        params['importers'] = True
        params['distributors'] = True

    want_importers = params.get('importers', False)
    want_distributors = params.get('distributors', False)
    self._process_repos(repos, want_importers, want_distributors)

    # Return the repos or an empty list; either way it's a 200
    return self.ok(repos)
def get_distributors(self, repo_id):
    """
    Returns all distributors on the given repo.

    @param repo_id: identifies the repo
    @type  repo_id: str

    @return: list of key-value pairs describing the distributors; empty list
             if there are none for the given repo
    @rtype:  list, None

    @raise MissingResource: if the given repo doesn't exist
    """
    # the repo itself must exist even if it has no distributors
    if Repo.get_collection().find_one({'id': repo_id}) is None:
        raise MissingResource(repository=repo_id)

    cursor = RepoDistributor.get_collection().find({'repo_id': repo_id})
    return list(cursor)
def update_repo_scratchpad(self, repo_id, scratchpad):
    """
    Update the repository scratchpad with the specified key-value pairs.
    New keys are added, existing keys are updated.

    :param repo_id: A repository ID
    :type  repo_id: str
    :param scratchpad: a dict used to update the scratchpad.
    :type  scratchpad: dict
    :raise ValueError: if scratchpad is not a dict
    :raise MissingResource: if there is no repo with repo_id
    """
    # validate up front, consistent with set_repo_scratchpad(); otherwise a
    # non-dict argument surfaces as an opaque AttributeError below
    if not isinstance(scratchpad, dict):
        raise ValueError('scratchpad must be a dict')

    # namespace every key under 'scratchpad.' so $set merges rather than
    # replacing the whole scratchpad document
    properties = dict(('scratchpad.%s' % k, v) for k, v in scratchpad.items())
    collection = Repo.get_collection()
    result = collection.update({'id': repo_id}, {'$set': properties}, safe=True)
    # n == 0 means the query matched no document, i.e. the repo is missing
    if result['n'] == 0:
        raise MissingResource(repo_id=repo_id)
def test_update_repo(self):
    """
    Tests the case of successfully updating a repo.
    """
    # Setup: create a repo with three notes so the delta can update one,
    # delete another, and leave the third alone
    self.manager.create_repo('update-me',
                             display_name='display_name_1',
                             description='description_1',
                             notes={'a': 'a', 'b': 'b', 'c': 'c'})

    delta = {'display_name': 'display_name_2',
             'description': 'description_2',
             'notes': {'b': 'x', 'c': None},
             'disregard': 'ignored'}

    # Test
    updated = self.manager.update_repo('update-me', delta)

    # Verify: 'b' updated, 'c' (None) removed, 'a' untouched,
    # 'disregard' ignored entirely
    expected_notes = {'a': 'a', 'b': 'x'}

    stored = Repo.get_collection().find_one({'id': 'update-me'})
    for doc in (stored, updated):
        self.assertEqual(doc['display_name'], delta['display_name'])
        self.assertEqual(doc['description'], delta['description'])
        self.assertEqual(doc['notes'], expected_notes)
def set_repo_scratchpad(self, repo_id, scratchpad):
    """
    Saves the given contents to the repository's scratchpad. There is no
    attempt to merge in the provided with the current scratchpad, it is
    simply overridden. When possible, use update_scratchpad() instead.

    :param repo_id: identifies the repo
    :type  repo_id: str
    :param scratchpad: new value to save in the scratchpad; must be a dict.
    :raise MissingResource: if there is no repo with repo_id
    """
    if not isinstance(scratchpad, dict):
        raise ValueError('scratchpad must be a dict')

    update_doc = {'$set': {'scratchpad': scratchpad}}
    result = Repo.get_collection().update({'id': repo_id}, update_doc, safe=True)

    # n == 0 means no document matched, i.e. the repo does not exist
    if result['n'] == 0:
        raise MissingResource(repo_id=repo_id)
def test_repo_delete(self):
    # Setup
    self.populate()

    # Test: queue the delete itinerary through the coordinator
    itinerary = repo_delete_itinerary(self.REPO_ID)
    reports = self.coordinator.execute_multiple_calls(itinerary)

    # Verify: four calls queued and none rejected
    self.assertEqual(len(reports), 4)
    for report in reports:
        self.assertNotEqual(report.state, dispatch_constants.CALL_REJECTED_RESPONSE)

    # run task #1: repo delete, then confirm the repo document is gone
    self.run_next()
    self.assertTrue(
        Repo.get_collection().find_one({'id': self.REPO_ID}) is None)
def find_with_importer_type(self, importer_type_id):
    """
    Returns a list of repositories, including importer configuration, for
    all repositories that have a configured importer of the given type. The
    importer for each repository will be stored in the repo under the key
    "importers".

    @return: list of repository dictionaries
    @rtype:  list
    """
    repo_collection = Repo.get_collection()
    importer_query = {'importer_type_id': importer_type_id}

    results = []
    # each repo has at most one importer, so a single lookup per match
    for importer in RepoImporter.get_collection().find(importer_query):
        repo = repo_collection.find_one({'id': importer['repo_id']})
        repo['importers'] = [importer]
        results.append(repo)

    return results