def clean(self):
    """Reset mock plugins and purge the repo/distributor collections."""
    super(RepoPublishConduitTests, self).clean()
    mock_plugins.reset()
    for model_class in (Repo, RepoDistributor):
        model_class.get_collection().remove()
def clean(self):
    """Purge every bind/repo related collection and the unit database."""
    for model_class in (Bind, Repo, RepoDistributor, RepoImporter, RepoContentUnit):
        model_class.get_collection().remove()
    unit_db.clean()
def clean(self):
    """Reset the shared mock distributor and purge repo/distributor collections."""
    super(RepoDistributorManagerTests, self).clean()
    mock_plugins.MOCK_DISTRIBUTOR.reset_mock()
    for model_class in (Repo, RepoDistributor):
        model_class.get_collection().remove()
def test_import(self):
    """
    End-to-end check: publish a repo with the nodes HTTP distributor, wipe the
    database, then re-import it via the nodes HTTP importer and verify that all
    units come back.
    """
    # Setup: populate fixture data and publish the repo to the local filesystem.
    self.populate()
    pulp_conf.set('server', 'storage_dir', self.parentfs)
    dist = NodesHttpDistributor()
    repo = Repository(self.REPO_ID)
    cfg = {
        'protocol': 'file',
        'http': {'alias': self.alias},
        'https': {'alias': self.alias},
        'file': {'alias': self.alias},
    }
    conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    dist.publish_repo(repo, conduit, cfg)
    # Wipe the database so the import starts from a clean slate; the published
    # files on disk are left in place for the importer to consume.
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    # Test: sync the repo back in from the just-published manifest.
    importer = NodesHttpImporter()
    publisher = dist.publisher(repo, cfg)
    manifest_url = 'file://' + publisher.manifest_path()
    cfg = dict(manifest_url=manifest_url, strategy=constants.MIRROR_STRATEGY)
    conduit = RepoSyncConduit(
        self.REPO_ID,
        constants.HTTP_IMPORTER,
        RepoContentUnit.OWNER_TYPE_IMPORTER,
        constants.HTTP_IMPORTER)
    importer.sync_repo(repo, conduit, cfg)
    # Verify: every published unit was re-imported.
    units = conduit.get_units()
    self.assertEquals(len(units), self.NUM_UNITS)
def tearDown(self):
    """Purge consumer/repo/bind collections and reset the mock plugins."""
    base.PulpWebserviceTests.tearDown(self)
    for model_class in (Consumer, Repo, RepoDistributor, Bind):
        model_class.get_collection().remove()
    mock_plugins.reset()
def tearDown(self):
    """Purge consumer/repo/bind collections and reset the mock plugins."""
    super(BindManagerTests, self).tearDown()
    for model_class in (Consumer, Repo, RepoDistributor, Bind):
        model_class.get_collection().remove()
    mock_plugins.reset()
def clean(self):
    """Drop the repository model plus importer/distributor records and task status."""
    super(RepoManagerTests, self).clean()
    model.Repository.drop_collection()
    for model_class in (RepoImporter, RepoDistributor):
        model_class.get_collection().remove()
    TaskStatus.objects().delete()
def tearDown(self):
    """Remove consumer/repo/bind documents (acknowledged writes) and reset mocks."""
    super(ConsumerTest, self).tearDown()
    for model_class in (Consumer, Repo, RepoDistributor, Bind):
        model_class.get_collection().remove(safe=True)
    mock_plugins.reset()
def clean(self):
    """Purge repo/importer/distributor collections and delete task status records."""
    super(RepoManagerTests, self).clean()
    for model_class in (Repo, RepoImporter, RepoDistributor):
        model_class.get_collection().remove()
    dispatch.TaskStatus.objects().delete()
def tearDown(self):
    """Purge consumer/repo/bind collections and reset the mock plugins."""
    PulpItineraryTests.tearDown(self)
    for model_class in (Consumer, Repo, RepoDistributor, Bind):
        model_class.get_collection().remove()
    mock_plugins.reset()
def test_delete_with_plugins(self):
    """
    Deleting a repo with an importer and distributors configured must remove
    the database records, notify the plugins, and clean up the working dir.
    """
    importers = RepoImporter.get_collection()
    distributors = RepoDistributor.get_collection()

    # Setup: one importer and two distributors on the doomed repo
    self.manager.create_repo('doomed')
    importer_manager = manager_factory.repo_importer_manager()
    distributor_manager = manager_factory.repo_distributor_manager()
    importer_manager.set_importer('doomed', 'mock-importer', {})
    for dist_id in ('dist-1', 'dist-2'):
        distributor_manager.add_distributor('doomed', 'mock-distributor', {}, True,
                                            distributor_id=dist_id)
    self.assertEqual(1, len(list(importers.find({'repo_id': 'doomed'}))))
    self.assertEqual(2, len(list(distributors.find({'repo_id': 'doomed'}))))

    # Test
    self.manager.delete_repo('doomed')

    # Verify: records gone, plugins told exactly once per instance, dir removed
    self.assertEqual(0, len(list(Repo.get_collection().find())))
    self.assertEqual(0, len(list(importers.find({'repo_id': 'doomed'}))))
    self.assertEqual(0, len(list(distributors.find({'repo_id': 'doomed'}))))
    self.assertEqual(1, mock_plugins.MOCK_IMPORTER.importer_removed.call_count)
    self.assertEqual(2, mock_plugins.MOCK_DISTRIBUTOR.distributor_removed.call_count)
    repo_working_dir = common_utils.repository_working_dir('doomed', mkdir=False)
    self.assertFalse(os.path.exists(repo_working_dir))
def tearDown(self):
    """Purge consumer/group/repo/bind collections and reset the mock plugins."""
    # NOTE(review): super(self.__class__, ...) breaks under subclassing; the
    # concrete class name is not visible here, so it is left as-is.
    super(self.__class__, self).tearDown()
    for model_class in (Consumer, ConsumerGroup, Repo, RepoDistributor, Bind):
        model_class.get_collection().remove()
    mock_plugins.reset()
def setUp(self):
    """Start from empty collections and install the mock plugin manager."""
    super(BindManagerTests, self).setUp()
    for model_class in (Consumer, Repo, RepoDistributor, Bind):
        model_class.get_collection().remove()
    plugin_api._create_manager()
    mock_plugins.install()
def tearDown(self):
    """Drop every collection touched by the bind tests and reset mocks."""
    super(BindManagerTests, self).tearDown()
    Consumer.get_collection().remove()
    model.Repository.drop_collection()
    for model_class in (RepoDistributor, Bind, ConsumerHistoryEvent):
        model_class.get_collection().remove()
    mock_plugins.reset()
def setUp(self):
    """Start from empty collections and install the mock plugin manager."""
    base.PulpWebserviceTests.setUp(self)
    for model_class in (Consumer, Repo, RepoDistributor, Bind):
        model_class.get_collection().remove()
    plugin_api._create_manager()
    mock_plugins.install()
def tearDown(self):
    """Purge collections, clean the type database, and finalize plugins."""
    PulpRPMTests.tearDown(self)
    for model_class in (Consumer, Repo, RepoContentUnit, RepoDistributor):
        model_class.get_collection().remove()
    database.clean()
    plugins.finalize()
def setUp(self):
    """Empty the collections and install mock plugins plus the mock agent."""
    PulpItineraryTests.setUp(self)
    for model_class in (Consumer, Repo, RepoDistributor, Bind):
        model_class.get_collection().remove()
    plugin_api._create_manager()
    mock_plugins.install()
    mock_agent.install()
def setUp(self):
    """Start from empty collections and install the mock plugin manager."""
    # NOTE(review): super(self.__class__, ...) breaks under subclassing; the
    # concrete class name is not visible here, so it is left as-is.
    super(self.__class__, self).setUp()
    for model_class in (Consumer, ConsumerGroup, Repo, RepoDistributor, Bind):
        model_class.get_collection().remove()
    plugin_api._create_manager()
    mock_plugins.install()
def tearDown(self):
    """Purge every profiler-conduit collection and reset type db and factory."""
    super(BaseProfilerConduitTests, self).tearDown()
    for model_class in (Consumer, Repo, RepoDistributor, Bind,
                        RepoContentUnit, UnitProfile):
        model_class.get_collection().remove()
    typedb.clean()
    factory.reset()
def setUp(self):
    """Empty the collections, register the test unit types, install mocks."""
    super(BaseProfilerConduitTests, self).setUp()
    for model_class in (Consumer, RepoDistributor, Bind,
                        RepoContentUnit, UnitProfile):
        model_class.get_collection().remove()
    plugin_api._create_manager()
    typedb.update_database([self.TYPE_1_DEF, self.TYPE_2_DEF])
    mock_plugins.install()
def tearDown(self):
    """Delete the scratch filesystems and purge every repo-related collection."""
    ServerTests.tearDown(self)
    shutil.rmtree(self.parentfs)
    shutil.rmtree(self.childfs)
    Consumer.get_collection().remove()
    Bind.get_collection().remove()
    model.Repository.drop_collection()
    for model_class in (RepoDistributor, RepoImporter, RepoContentUnit):
        model_class.get_collection().remove()
    unit_db.clean()
def clean(self, units_only=False, plugins=False):
    """
    Purge content units; unless units_only, also purge bind/repo collections,
    and optionally clear the loaded distributor plugins.
    """
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    if units_only:
        return
    for model_class in (Bind, Repo, RepoDistributor, RepoImporter):
        model_class.get_collection().remove()
    if plugins:
        plugin_api._MANAGER.distributors.plugins = {}
def tearDown(self):
    """Delete the scratch filesystems and purge every repo-related collection."""
    WebTest.tearDown(self)
    shutil.rmtree(self.parentfs)
    shutil.rmtree(self.childfs)
    for model_class in (Consumer, Bind, Repo, RepoDistributor,
                        RepoImporter, RepoContentUnit):
        model_class.get_collection().remove()
    unit_db.clean()
def test_last_missing_distributor(self):
    """
    last_publish must raise MissingResource when asked about a distributor
    id that does not exist on the repo.
    """
    # Setup: only "dist-1" exists on repo-1
    distributor = RepoDistributor("repo-1", "dist-1", "type-1", None, True)
    RepoDistributor.get_collection().save(distributor)
    # Test: querying any other distributor id must fail
    self.assertRaises(MissingResource, self.publish_manager.last_publish,
                      "repo-1", "random-dist")
def clean(self, just_units=False, purge_plugins=False):
    """
    Purge content units; unless just_units, also purge bind/repo collections,
    and optionally clear all loaded importer and distributor plugins.
    """
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    if just_units:
        return
    for model_class in (Bind, Repo, RepoDistributor, RepoImporter):
        model_class.get_collection().remove()
    if purge_plugins:
        plugin_api._MANAGER.importers.plugins = {}
        plugin_api._MANAGER.distributors.plugins = {}
def test_last_publish_never_published(self):
    """
    last_publish must return None (and not error) for a distributor that has
    never published.
    """
    # Setup: a distributor with no publish history
    distributor = RepoDistributor("repo-1", "dist-1", "type-1", None, True)
    RepoDistributor.get_collection().save(distributor)
    # Test
    last = self.publish_manager.last_publish("repo-1", "dist-1")
    # Verify
    self.assertTrue(last is None)
def _do_publish(self, repo, distributor_id, distributor_instance, transfer_repo, conduit,
                call_config):
    # Run the distributor's publish; on failure, stamp last_publish, record an
    # error history entry, and re-raise as PulpExecutionException with the
    # original traceback (Python 2 three-expression raise).
    # NOTE(review): the success path (use of publish_report) continues beyond
    # this excerpt and is not visible here.
    distributor_coll = RepoDistributor.get_collection()
    publish_result_coll = RepoPublishResult.get_collection()
    repo_id = repo['id']
    # Perform the publish
    publish_start_timestamp = _now_timestamp()
    try:
        publish_report = distributor_instance.publish_repo(transfer_repo, conduit, call_config)
    except Exception, e:
        publish_end_timestamp = _now_timestamp()
        # Reload the distributor in case the scratchpad is set by the plugin
        repo_distributor = distributor_coll.find_one(
            {'repo_id': repo_id, 'id': distributor_id})
        # last_publish is stamped even on failure ("regardless of success")
        repo_distributor['last_publish'] = publish_end_timestamp
        distributor_coll.save(repo_distributor, safe=True)
        # Add a publish history entry for the run
        result = RepoPublishResult.error_result(
            repo_id, repo_distributor['id'], repo_distributor['distributor_type_id'],
            publish_start_timestamp, publish_end_timestamp, e, sys.exc_info()[2])
        publish_result_coll.save(result, safe=True)
        _LOG.exception(
            _('Exception caught from plugin during publish for repo [%(r)s]' % {'r': repo_id}))
        # Re-raise as a Pulp exception, preserving the plugin's traceback
        raise PulpExecutionException(), None, sys.exc_info()[2]
def set_distributor_scratchpad(self, repo_id, distributor_id, contents):
    """
    Sets the value of the scratchpad for the given repo/distributor pair and
    saves it to the database, replacing any previously saved value. If no
    distributor with the given ID exists on the repo, this call is a no-op.

    @param repo_id: identifies the repo
    @type  repo_id: str

    @param distributor_id: identifies the distributor on the repo
    @type  distributor_id: str

    @param contents: value to write to the scratchpad field
    @type  contents: anything that can be saved in the database
    """
    collection = RepoDistributor.get_collection()
    distributor = collection.find_one({'repo_id': repo_id, 'id': distributor_id})
    # Best-effort by contract: silently ignore an unknown distributor.
    if distributor is None:
        return
    distributor['scratchpad'] = contents
    collection.save(distributor, safe=True)
def get_distributor_scratchpad(self, repo_id, distributor_id):
    """
    Returns the contents of the distributor's scratchpad for the given repo.
    If there is no such distributor or the scratchpad has never been set,
    None is returned.

    @param repo_id: identifies the repo
    @type  repo_id: str

    @param distributor_id: identifies the distributor on the repo
    @type  distributor_id: str

    @return: value set for the distributor's scratchpad
    @rtype:  anything that can be saved in the database
    """
    collection = RepoDistributor.get_collection()
    distributor = collection.find_one({'repo_id': repo_id, 'id': distributor_id})
    if distributor is None:
        return None
    return distributor.get('scratchpad', None)
def last_publish(self, repo_id, distributor_id):
    """
    Returns the timestamp of the last publish call, regardless of its success
    or failure. If the repo has never been published, returns None.

    @param repo_id: identifies the repo
    @type  repo_id: str

    @param distributor_id: identifies the repo's distributor
    @type  distributor_id: str

    @return: timestamp of the last publish
    @rtype:  datetime or None

    @raise MissingResource: if there is no distributor identified by the
           given repo ID and distributor ID
    """
    collection = RepoDistributor.get_collection()
    distributor = collection.find_one({'repo_id': repo_id, 'id': distributor_id})
    if distributor is None:
        raise MissingResource(repo_id)
    # A never-published repo stores None in last_publish.
    date_str = distributor['last_publish']
    if date_str is None:
        return None
    return dateutils.parse_iso8601_datetime(date_str)
def test_add_distributor_no_explicit_id(self):
    """
    A distributor ID must be auto-generated when none is supplied.
    """
    # Setup
    self.repo_manager.create_repo('happy-repo')
    # Test
    added = self.distributor_manager.add_distributor(
        'happy-repo', 'mock-distributor', {}, True)
    # Verify: the distributor is retrievable under the generated id
    query = {'repo_id': 'happy-repo', 'id': added['id']}
    distributor = RepoDistributor.get_collection().find_one(query)
    self.assertTrue(distributor is not None)
def remove_publish_schedule(self, repo_id, distributor_id, schedule_id):
    """
    Remove a scheduled publish from the given distributor on the repo.
    A no-op if the schedule is not present on the distributor.

    @param repo_id: identifies the repo
    @param distributor_id: identifies the distributor on the repo
    @param schedule_id: identifies the scheduled publish to remove
    @raise MissingResource: if the distributor does not exist on the repo
    """
    collection = RepoDistributor.get_collection()
    query = {'repo_id': repo_id, 'id': distributor_id}
    distributor = collection.find_one(query)
    if distributor is None:
        raise MissingResource(repo=repo_id, distributor=distributor_id)
    if schedule_id not in distributor['scheduled_publishes']:
        return
    collection.update({'_id': distributor['_id']},
                      {'$pull': {'scheduled_publishes': schedule_id}})
def get_distributors(self, repo_id):
    """
    Returns all distributors on the given repo.

    @param repo_id: identifies the repo
    @type  repo_id: str

    @return: list of key-value pairs describing the distributors; empty list
             if there are none for the given repo
    @rtype:  list, None

    @raise MissingResource: if the given repo doesn't exist
    """
    if Repo.get_collection().find_one({'id': repo_id}) is None:
        raise MissingResource(repository=repo_id)
    cursor = RepoDistributor.get_collection().find({'repo_id': repo_id})
    return list(cursor)
def get_repo_distributors_by_relative_url(self, rel_url, repo_id=None):
    """
    Get the config repo_id and config objects matching a given relative URL.

    :param rel_url: a relative URL for a distributor config
    :type  rel_url: str
    :param repo_id: the id of a repo to skip; if not specified all
                    repositories are included in the search
    :type  repo_id: str
    :return: a cursor over the repository configurations whose configuration
             conflicts with rel_url
    :rtype:  pymongo.cursor.Cursor
    """
    # Every prefix of rel_url could conflict with an existing relative_url,
    # so collect them all ("/a", "/a/b", ...).
    url_pieces = [piece for piece in rel_url.split("/") if piece]
    matching_url_list = []
    working_url = "/"
    for piece in url_pieces:
        working_url += piece
        matching_url_list.append(working_url)
        working_url += "/"

    # The base segment of the URL; used for distributors where the repo id is
    # a substitute for the relative url (/repo-id/).
    repo_id_url = url_pieces[0]

    # Match any sub-url of rel_url as well as any url falling within it.
    spec = {'$or': [
        {'config.relative_url': {'$regex': '^' + working_url + '.*'}},
        {'config.relative_url': {'$in': matching_url_list}},
        {'$and': [{'config.relative_url': {'$exists': False}},
                  {'repo_id': repo_id_url}]},
    ]}
    if repo_id is not None:
        spec = {'$and': [{'repo_id': {'$ne': repo_id}}, spec]}

    projection = {'repo_id': 1, 'config': 1}
    return RepoDistributor.get_collection().find(spec, projection)
def test_remove_distributor(self, mock_delete_schedules):
    """
    Removing an existing distributor must delete its record and its schedules.
    """
    # Setup
    self.repo_manager.create_repo('dist-repo')
    self.distributor_manager.add_distributor('dist-repo', 'mock-distributor', {}, True,
                                             distributor_id='doomed')
    # Test
    self.distributor_manager.remove_distributor('dist-repo', 'doomed')
    # Verify: the record is gone and schedule deletion was triggered once
    query = {'repo_id': 'dist-repo', 'id': 'doomed'}
    self.assertTrue(RepoDistributor.get_collection().find_one(query) is None)
    mock_delete_schedules.assert_called_once_with('dist-repo', 'doomed')
def test_add_distributor_multiple_distributors(self, mock_repo_qs):
    """
    Two distributors with distinct ids may coexist on one repository.
    """
    self.distributor_manager.add_distributor('test_me', 'mock-distributor', {}, True,
                                             distributor_id='dist_1')
    # Test: add a second, different-typed distributor
    self.distributor_manager.add_distributor('test_me', 'mock-distributor-2', {}, True,
                                             distributor_id='dist_2')
    # Verify: both records exist under their respective ids
    all_distributors = list(RepoDistributor.get_collection().find())
    self.assertEqual(2, len(all_distributors))
    found_ids = set(d['id'] for d in all_distributors)
    self.assertTrue('dist_1' in found_ids)
    self.assertTrue('dist_2' in found_ids)
def _do_publish(repo, distributor_id, distributor_instance, transfer_repo, conduit, call_config):
    # Run the distributor's publish (wrapped for SIGTERM cancellation); on
    # failure, stamp last_publish, record an error history entry, and re-raise.
    # NOTE(review): the success path (use of publish_report) continues beyond
    # this excerpt and is not visible here.
    distributor_coll = RepoDistributor.get_collection()
    publish_result_coll = RepoPublishResult.get_collection()
    repo_id = repo['id']
    # Perform the publish
    publish_start_timestamp = _now_timestamp()
    try:
        # Add the register_sigterm_handler decorator to the publish_repo call, so that we can
        # respond to signals by calling the Distributor's cancel_publish_repo() method.
        publish_repo = register_sigterm_handler(
            distributor_instance.publish_repo, distributor_instance.cancel_publish_repo)
        publish_report = publish_repo(transfer_repo, conduit, call_config)
    except Exception, e:
        publish_end_timestamp = _now_timestamp()
        # Reload the distributor in case the scratchpad is set by the plugin
        repo_distributor = distributor_coll.find_one({
            'repo_id': repo_id,
            'id': distributor_id
        })
        # last_publish is stamped even on failure ("regardless of success")
        repo_distributor['last_publish'] = publish_end_timestamp
        distributor_coll.save(repo_distributor, safe=True)
        # Add a publish history entry for the run
        result = RepoPublishResult.error_result(
            repo_id, repo_distributor['id'], repo_distributor['distributor_type_id'],
            publish_start_timestamp, publish_end_timestamp, e, sys.exc_info()[2])
        publish_result_coll.save(result, safe=True)
        logger.exception(
            _('Exception caught from plugin during publish for repo [%(r)s]' % {'r': repo_id}))
        # Propagate the original exception to the caller
        raise
def last_publish(self):
    """
    Returns the timestamp of the last time this repo was published, regardless
    of the success or failure of the publish. If the repo was never published,
    this call returns None.

    :return: timestamp instance describing the last publish
    :rtype:  datetime.datetime or None
    :raises DistributorConduitException: if any errors occur
    """
    try:
        collection = RepoDistributor.get_collection()
        distributor = collection.find_one({
            'repo_id': self.repo_id,
            'id': self.distributor_id
        })
        if distributor is None:
            raise pulp_exceptions.MissingResource(self.repo_id)
        return distributor['last_publish']
    except Exception, e:
        # Wrap any error (including the MissingResource above) in a conduit
        # exception, preserving the original traceback (py2 three-expr raise).
        _logger.exception('Error getting last publish time for repo [%s]' % self.repo_id)
        raise DistributorConduitException(e), None, sys.exc_info()[2]
def get_distributor(self, repo_id, distributor_id):
    """
    Returns an individual distributor on the given repo.

    @param repo_id: identifies the repo
    @type  repo_id: str

    @param distributor_id: identifies the distributor
    @type  distributor_id: str

    @return: key-value pairs describing the distributor
    @rtype:  dict

    @raise MissingResource: if either the repo doesn't exist or there is no
           distributor with the given ID
    """
    query = {'repo_id': repo_id, 'id': distributor_id}
    distributor = RepoDistributor.get_collection().find_one(query)
    if distributor is None:
        raise MissingResource(distributor=distributor_id)
    return distributor
def last_publish(self, repo_id, distributor_id):
    """
    Returns the timestamp of the last publish call, regardless of its success
    or failure. If the repo has never been published, returns None.

    @param repo_id: identifies the repo
    @type  repo_id: str

    @param distributor_id: identifies the repo's distributor
    @type  distributor_id: str

    @return: timestamp of the last publish
    @rtype:  datetime or None

    @raise MissingResource: if there is no distributor identified by the
           given repo ID and distributor ID
    """
    repo_distributor = RepoDistributor.get_collection().find_one(
        {'repo_id': repo_id, 'id': distributor_id})
    if repo_distributor is None:
        raise MissingResource(repo_id)
    # None means the repo has never been published; otherwise the stored
    # value is an ISO-8601 string to be converted to a datetime.
    last = repo_distributor['last_publish']
    return None if last is None else dateutils.parse_iso8601_datetime(last)
class RepoManager(object):
    """
    Performs repository related functions relating to both CRUD operations and
    actions performed on or by repositories.
    """
    @staticmethod
    def create_repo(repo_id, display_name=None, description=None, notes=None):
        """
        Creates a new Pulp repository that is not associated with any importers
        or distributors (those are added later through separate calls).

        :param repo_id: unique identifier for the repo
        :type  repo_id: str
        :param display_name: user-friendly name for the repo
        :type  display_name: str
        :param description: user-friendly text describing the repo's contents
        :type  description: str
        :param notes: key-value pairs to programmatically tag the repo
        :type  notes: dict

        :raise DuplicateResource: if there is already a repo with the requested ID
        :raise InvalidValue: if any of the fields are unacceptable
        """
        existing_repo = Repo.get_collection().find_one({'id': repo_id})
        if existing_repo is not None:
            raise DuplicateResource(repo_id)
        if repo_id is None or not is_repo_id_valid(repo_id):
            raise InvalidValue(['repo_id'])
        if notes is not None and not isinstance(notes, dict):
            raise InvalidValue(['notes'])

        # Use the ID for the display name if one was not specified
        display_name = display_name or repo_id

        # Creation
        create_me = Repo(repo_id, display_name, description, notes)
        Repo.get_collection().save(create_me, safe=True)

        # Retrieve the repo to return the SON object
        created = Repo.get_collection().find_one({'id': repo_id})
        return created

    @staticmethod
    def create_and_configure_repo(repo_id, display_name=None, description=None, notes=None,
                                  importer_type_id=None, importer_repo_plugin_config=None,
                                  distributor_list=()):
        """
        Aggregate method that will create a repository and add importers and
        distributors in a single call. If there is an issue adding any of the
        importers or distributors, the repository is deleted before the
        exception is re-raised.

        This call aggregates calls to RepoImporterManager.set_importer and
        RepoDistributorManager.add_distributor. Documentation for those methods
        should be consulted for more information on the parameters to this
        method that correspond to those calls.

        Multiple distributors can be created in this call. Each distributor is
        specified as a dict with the following keys:

          distributor_type - ID of the type of distributor being added
          distributor_config - values sent to the distributor when used by
                               this repository
          auto_publish - boolean indicating if the distributor should
                         automatically publish with every sync; defaults to False
          distributor_id - used to refer to the distributor later; if omitted,
                           one will be generated

        :param repo_id: unique identifier for the repo
        :type  repo_id: str
        :param display_name: user-friendly name for the repo
        :type  display_name: str
        :param description: user-friendly text describing the repo's contents
        :type  description: str
        :param notes: key-value pairs to programmatically tag the repo
        :type  notes: dict
        :param importer_type_id: if specified, an importer with this type ID
               will be added to the repo
        :type  importer_type_id: str
        :param distributor_list: list of dicts describing the distributors to
               add; more details in the docstring above
        :type  distributor_list: list

        :raise DuplicateResource: if there is already a repo with the requested ID
        :raise InvalidValue: if any of the non-ID fields is unacceptable
        """
        # Let any exceptions out of this call simply bubble up, there's nothing
        # special about this step.
        repo = RepoManager.create_repo(repo_id, display_name=display_name,
                                       description=description, notes=notes)

        # Add the importer if it's specified. If that fails, delete the repository
        # before re-raising the exception.
        if importer_type_id is not None:
            importer_manager = manager_factory.repo_importer_manager()
            try:
                importer_manager.set_importer(repo_id, importer_type_id,
                                              importer_repo_plugin_config)
            except Exception:
                _logger.exception(
                    'Exception adding importer to repo [%s]; the repo will be deleted' % repo_id)
                RepoManager.delete_repo(repo_id)
                raise

        # Regardless of how many distributors are successfully added, or if an
        # importer was added, we only need a single call to delete_repo in the
        # error block. That call will take care of all of the cleanup.
        distributor_manager = manager_factory.repo_distributor_manager()

        # Guard against a non-iterable distributor_list before looping.
        if distributor_list is not None and not isinstance(
                distributor_list, (list, tuple)):
            RepoManager.delete_repo(repo_id)
            raise InvalidValue(['distributor_list'])

        for distributor in distributor_list or []:
            if not isinstance(distributor, dict):
                RepoManager.delete_repo(repo_id)
                raise InvalidValue(['distributor_list'])
            try:
                # Don't bother with any validation here, the manager will run it
                type_id = distributor.get('distributor_type_id')
                plugin_config = distributor.get('distributor_config')
                auto_publish = distributor.get('auto_publish', False)
                distributor_id = distributor.get('distributor_id')
                distributor_manager.add_distributor(repo_id, type_id, plugin_config,
                                                    auto_publish, distributor_id)
            except Exception:
                _logger.exception(
                    'Exception adding distributor to repo [%s]; the repo will be '
                    'deleted' % repo_id)
                RepoManager.delete_repo(repo_id)
                raise

        return repo

    @staticmethod
    def delete_repo(repo_id):
        """
        Deletes the given repository, optionally requesting the associated
        importer clean up any content in the repository.

        :param repo_id: identifies the repo being deleted
        :type  repo_id: str

        :raise MissingResource: if the given repo does not exist
        :raise OperationFailed: if any part of the delete process fails; the
               exception will contain information on which sections failed
        """
        # Validation
        found = Repo.get_collection().find_one({'id': repo_id})
        if found is None:
            raise MissingResource(repo_id)

        # With so much going on during a delete, it's possible that a few things
        # could go wrong while others are successful. We track lesser errors
        # that shouldn't abort the entire process until the end and then raise
        # an exception describing the incompleteness of the delete. The exception
        # arguments are captured as the second element in the tuple, but the user
        # will have to look at the server logs for more information.
        error_tuples = []  # tuple of failed step and exception arguments

        importer_manager = manager_factory.repo_importer_manager()
        distributor_manager = manager_factory.repo_distributor_manager()

        # Inform the importer
        importer_coll = RepoImporter.get_collection()
        repo_importer = importer_coll.find_one({'repo_id': repo_id})
        if repo_importer is not None:
            try:
                importer_manager.remove_importer(repo_id)
            except Exception, e:
                _logger.exception(
                    'Error received removing importer [%s] from repo [%s]' % (
                        repo_importer['importer_type_id'], repo_id))
                error_tuples.append(e)

        # Inform all distributors
        distributor_coll = RepoDistributor.get_collection()
        repo_distributors = list(distributor_coll.find({'repo_id': repo_id}))
        for repo_distributor in repo_distributors:
            try:
                distributor_manager.remove_distributor(repo_id, repo_distributor['id'])
            except Exception, e:
                _logger.exception(
                    'Error received removing distributor [%s] from repo [%s]' % (
                        repo_distributor['id'], repo_id))
                error_tuples.append(e)
        # NOTE(review): delete_repo continues beyond this excerpt (the
        # remaining cleanup and error aggregation are not visible here).
def test_calls_delete_resource(self, mock_delete_by):
    """delete_by_distributor_id must delegate to delete_by with the resource tag."""
    tag = RepoDistributor.build_resource_tag(self.repo, self.distributor)
    RepoPublishScheduleManager.delete_by_distributor_id(self.repo, self.distributor)
    mock_delete_by.assert_called_once_with(tag)
def _do_publish(repo_obj, dist_id, dist_inst, transfer_repo, conduit, call_config):
    """
    Publish the repository using the given distributor.

    :param repo_obj: repository object
    :type  repo_obj: pulp.server.db.model.Repository
    :param dist_id: identifies the distributor
    :type  dist_id: str
    :param dist_inst: instance of the distributor
    :type  dist_inst: dict
    :param transfer_repo: dict representation of a repo for the plugins to use
    :type  transfer_repo: pulp.plugins.model.Repository
    :param conduit: allows the plugin to interact with core pulp
    :type  conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param call_config: allows the plugin to retrieve values
    :type  call_config: pulp.plugins.config.PluginCallConfiguration

    :return: publish result containing information about the publish
    :rtype:  pulp.server.db.model.repository.RepoPublishResult

    :raises pulp_exceptions.PulpCodedException: if the publish report's success
            flag is falsey
    """
    # NOTE(review): the success path (use of publish_report after the
    # try/except) continues beyond this excerpt and is not visible here.
    distributor_coll = RepoDistributor.get_collection()
    publish_result_coll = RepoPublishResult.get_collection()
    publish_start_timestamp = _now_timestamp()
    try:
        # Add the register_sigterm_handler decorator to the publish_repo call, so that we can
        # respond to signals by calling the Distributor's cancel_publish_repo() method.
        publish_repo = register_sigterm_handler(dist_inst.publish_repo,
                                                dist_inst.cancel_publish_repo)
        publish_report = publish_repo(transfer_repo, conduit, call_config)
        # A report that explicitly reports failure is converted into a coded
        # exception so it is handled by the error path below.
        if publish_report is not None and hasattr(publish_report, 'success_flag') \
                and not publish_report.success_flag:
            raise pulp_exceptions.PulpCodedException(
                error_code=error_codes.PLP0034, repository_id=repo_obj.repo_id,
                distributor_id=dist_id)
    except Exception, e:
        publish_end_timestamp = _now_timestamp()
        # Reload the distributor in case the scratchpad is set by the plugin
        repo_distributor = distributor_coll.find_one({
            'repo_id': repo_obj.repo_id,
            'id': dist_id
        })
        distributor_coll.save(repo_distributor, safe=True)
        # Add a publish history entry for the run
        result = RepoPublishResult.error_result(
            repo_obj.repo_id, repo_distributor['id'],
            repo_distributor['distributor_type_id'], publish_start_timestamp,
            publish_end_timestamp, e, sys.exc_info()[2])
        publish_result_coll.save(result, safe=True)
        _logger.exception(
            _('Exception caught from plugin during publish for repo [%(r)s]'
              % {'r': repo_obj.repo_id}))
        # Propagate the original exception to the caller
        raise
def clean(self):
    """Purge repo/importer/distributor and scheduled-call collections."""
    super(ScheduleTests, self).clean()
    for model_class in (Repo, RepoDistributor, RepoImporter, ScheduledCall):
        model_class.get_collection().remove(safe=True)
def update_distributor_config(repo_id, distributor_id, distributor_config, auto_publish=None):
    """
    Attempts to update the saved configuration for the given distributor. The
    distributor will be asked if the new configuration is valid. If not, this
    method will raise an error and the existing configuration will remain
    unchanged.

    :param repo_id: identifies the repo
    :type  repo_id: str
    :param distributor_id: identifies the distributor on the repo
    :type  distributor_id: str
    :param distributor_config: new configuration values to use
    :type  distributor_config: dict
    :param auto_publish: if true, this distributor is used automatically
                         during a sync operation
    :type  auto_publish: bool
    :return: the updated distributor
    :rtype:  dict
    :raise MissingResource: if the given repo or distributor doesn't exist
    :raise PulpDataException: if the plugin rejects the given changes
    """
    # NOTE(review): the function continues beyond this excerpt (persisting the
    # merged config and returning the distributor are not visible here).
    repo_coll = Repo.get_collection()
    distributor_coll = RepoDistributor.get_collection()

    # Input Validation
    repo = repo_coll.find_one({'id': repo_id})
    if repo is None:
        raise MissingResource(repository=repo_id)
    repo_distributor = distributor_coll.find_one({
        'repo_id': repo_id,
        'id': distributor_id
    })
    if repo_distributor is None:
        raise MissingResource(distributor=distributor_id)

    distributor_type_id = repo_distributor['distributor_type_id']
    distributor_instance, plugin_config = plugin_api.get_distributor_by_id(
        distributor_type_id)

    # The supplied config is a delta of changes to make to the existing config.
    # The plugin expects a full configuration, so we apply those changes to
    # the original config and pass that to the plugin's validate method.
    merged_config = dict(repo_distributor['config'])

    # The convention is that None in an update is removing the value and
    # setting it to the default. Find all such properties in this delta and
    # remove them from the existing config if they are there.
    unset_property_names = [
        k for k in distributor_config if distributor_config[k] is None
    ]
    for key in unset_property_names:
        merged_config.pop(key, None)
        distributor_config.pop(key, None)

    # Whatever is left over are the changed/added values, so merge them in.
    merged_config.update(distributor_config)

    # Let the distributor plugin verify the configuration
    call_config = PluginCallConfiguration(plugin_config, merged_config)
    transfer_repo = common_utils.to_transfer_repo(repo)
    transfer_repo.working_dir = common_utils.distributor_working_dir(
        distributor_type_id, repo_id)
    config_conduit = RepoConfigConduit(distributor_type_id)

    try:
        result = distributor_instance.validate_config(
            transfer_repo, call_config, config_conduit)
        # For backward compatibility with plugins that don't yet return the tuple
        if isinstance(result, bool):
            valid_config = result
            message = None
        else:
            valid_config, message = result
    except Exception, e:
        # Wrap plugin validation errors, preserving the original traceback
        # (Python 2 three-expression raise).
        msg = _(
            'Exception raised from distributor [%(d)s] while validating config for repo '
            '[%(r)s]')
        msg = msg % {'d': distributor_type_id, 'r': repo_id}
        logger.exception(msg)
        raise PulpDataException(e.args), None, sys.exc_info()[2]
class RepoDistributorManager(object):
    """
    Read and write operations on repo/distributor associations, backed by the
    RepoDistributor Mongo collection and the loaded distributor plugins.
    """

    def get_distributor(self, repo_id, distributor_id):
        """
        Returns an individual distributor on the given repo.

        @param repo_id: identifies the repo
        @type repo_id: str
        @param distributor_id: identifies the distributor
        @type distributor_id: str
        @return: key-value pairs describing the distributor
        @rtype: dict
        @raise MissingResource: if either the repo doesn't exist or there is no
               distributor with the given ID
        """
        # NOTE(review): the repo itself is never looked up here — a missing repo
        # and a missing distributor both surface as MissingResource(distributor=...).
        distributor = RepoDistributor.get_collection().find_one(
            {'repo_id': repo_id, 'id': distributor_id})
        if distributor is None:
            raise MissingResource(distributor=distributor_id)
        return distributor

    def get_distributors(self, repo_id):
        """
        Returns all distributors on the given repo.

        @param repo_id: identifies the repo
        @type repo_id: str
        @return: list of key-value pairs describing the distributors; empty list
                 if there are none for the given repo
        @rtype: list, None
        @raise MissingResource: if the given repo doesn't exist
        """
        # Unlike get_distributor, this one validates the repo's existence first.
        repo = Repo.get_collection().find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repository=repo_id)
        distributors = list(RepoDistributor.get_collection().find({'repo_id': repo_id}))
        return distributors

    @staticmethod
    def find_by_repo_list(repo_id_list):
        """
        Returns serialized versions of all distributors for given repos. Any
        IDs that do not refer to valid repos are ignored and will not raise
        an error.

        @param repo_id_list: list of distributor IDs to fetch
        @type repo_id_list: list of str
        @return: list of serialized distributors
        @rtype: list of dict
        """
        spec = {'repo_id': {'$in': repo_id_list}}
        # Exclude the (potentially large) plugin scratchpad from the results.
        projection = {'scratchpad': 0}
        return list(RepoDistributor.get_collection().find(spec, projection))

    @staticmethod
    def add_distributor(repo_id, distributor_type_id, repo_plugin_config, auto_publish,
                        distributor_id=None):
        """
        Adds an association from the given repository to a distributor. The
        association will be tracked through the distributor_id; each distributor
        on a given repository must have a unique ID. If this is not specified,
        one will be generated. If a distributor already exists on the repo for
        the given ID, the existing one will be removed and replaced with the
        newly configured one.

        :param repo_id: identifies the repo
        :type repo_id: str
        :param distributor_type_id: identifies the distributor; must correspond
               to a distributor loaded at server startup
        :type distributor_type_id: str
        :param repo_plugin_config: configuration the repo will use with this
               distributor; may be None
        :type repo_plugin_config: dict
        :param auto_publish: if true, this distributor will be invoked at the
               end of every sync
        :type auto_publish: bool
        :param distributor_id: unique ID to refer to this distributor for this repo
        :type distributor_id: str
        :return: ID assigned to the distributor (only valid in conjunction with the repo)
        :raise MissingResource: if the given repo_id does not refer to a valid repo
        :raise InvalidValue: if the distributor ID is provided and unacceptable
        :raise InvalidDistributorConfiguration: if the distributor plugin does not
               accept the given configuration
        """
        repo_coll = Repo.get_collection()
        distributor_coll = RepoDistributor.get_collection()

        # Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repository=repo_id)
        if not plugin_api.is_valid_distributor(distributor_type_id):
            raise InvalidValue(['distributor_type_id'])

        # Determine the ID for this distributor on this repo; will be
        # unique for all distributors on this repository but not globally
        if distributor_id is None:
            distributor_id = str(uuid.uuid4())
        else:
            # Validate if one was passed in
            if not is_distributor_id_valid(distributor_id):
                raise InvalidValue(['distributor_id'])

        distributor_instance, plugin_config = plugin_api.get_distributor_by_id(
            distributor_type_id)

        # Convention is that a value of None means unset. Remove any keys that
        # are explicitly set to None so the plugin will default them.
        if repo_plugin_config is not None:
            clean_config = dict([(k, v) for k, v in repo_plugin_config.items()
                                 if v is not None])
        else:
            clean_config = None

        # Let the distributor plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, clean_config)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.distributor_working_dir(
            distributor_type_id, repo_id)
        config_conduit = RepoConfigConduit(distributor_type_id)

        try:
            result = distributor_instance.validate_config(
                transfer_repo, call_config, config_conduit)
            # For backward compatibility with plugins that don't yet return the tuple
            if isinstance(result, bool):
                valid_config = result
                message = None
            else:
                valid_config, message = result
        except Exception, e:
            logger.exception(
                'Exception received from distributor [%s] while validating config'
                % distributor_type_id)
            # Re-raise with the original traceback preserved (Python 2 form).
            raise PulpDataException(e.args), None, sys.exc_info()[2]

        if not valid_config:
            raise PulpDataException(message)

        # Remove the old distributor if it exists (replace semantics).
        try:
            RepoDistributorManager.remove_distributor(repo_id, distributor_id)
        except MissingResource:
            pass  # if it didn't exist, no problem

        # Let the distributor plugin initialize the repository
        try:
            distributor_instance.distributor_added(transfer_repo, call_config)
        except Exception:
            logger.exception(
                'Error initializing distributor [%s] for repo [%s]'
                % (distributor_type_id, repo_id))
            raise PulpExecutionException(), None, sys.exc_info()[2]

        # Database Update — only after the plugin accepted both the config and
        # the initialization hook.
        distributor = RepoDistributor(repo_id, distributor_id, distributor_type_id,
                                      clean_config, auto_publish)
        distributor_coll.save(distributor, safe=True)

        return distributor
def publish_history(self, repo_id, distributor_id, limit=None, sort=constants.SORT_DESCENDING,
                    start_date=None, end_date=None):
    """
    Return publish history entries for the given repo/distributor pair,
    ordered by start date (descending by default). An empty list is returned
    when there is no history.

    :param repo_id: identifies the repo
    :type repo_id: str
    :param distributor_id: identifies the distributor to retrieve history for
    :type distributor_id: str
    :param limit: if specified, at most this many entries are returned;
                  default is the entire publish history
    :type limit: int
    :param sort: sort direction for the results ("ascending"/"descending");
                 results are sorted by start date
    :type sort: str
    :param start_date: iso8601 datetime string; events before it are excluded
    :type start_date: str
    :param end_date: iso8601 datetime string; events after it are excluded
    :type end_date: str
    :return: list of publish history result instances
    :rtype: list
    :raise MissingResource: if repo_id does not reference a valid repo
    :raise InvalidValue: if one or more of the options have invalid values
    """
    # Both halves of the association must exist before options are checked.
    if Repo.get_collection().find_one({'id': repo_id}) is None:
        raise MissingResource(repo_id)
    if RepoDistributor.get_collection().find_one(
            {'repo_id': repo_id, 'id': distributor_id}) is None:
        raise MissingResource(distributor_id)

    # Collect every bad option so the caller sees them all in one error.
    bad_options = []

    if limit is not None:
        try:
            limit = int(limit)
        except ValueError:
            bad_options.append('limit')
        else:
            if limit < 1:
                bad_options.append('limit')

    if sort not in constants.SORT_DIRECTION:
        bad_options.append('sort')

    for option_name, option_value in (('start_date', start_date), ('end_date', end_date)):
        if option_value is None:
            continue
        try:
            dateutils.parse_iso8601_datetime(option_value)
        except (ValueError, isodate.ISO8601Error):
            bad_options.append(option_name)

    if bad_options:
        raise InvalidValue(bad_options)

    # Build the mongo query: exact repo/distributor match plus an optional
    # range filter on the 'started' timestamp.
    criteria = {'repo_id': repo_id, 'distributor_id': distributor_id}
    started_range = {}
    if start_date:
        started_range['$gte'] = start_date
    if end_date:
        started_range['$lte'] = end_date
    if started_range:
        criteria['started'] = started_range

    results = RepoPublishResult.get_collection().find(criteria)
    results.sort('started', direction=constants.SORT_DIRECTION[sort])
    if limit is not None:
        results.limit(limit)

    return list(results)
def tearDown(self): super(TestDoPublish, self).tearDown() mock_plugins.reset() Repo.get_collection().remove() RepoDistributor.get_collection().remove() RepoPublishResult.get_collection().remove()
def test_list(self, mock_get_by_resource, mock_validate_distributor): ret = RepoPublishScheduleManager.list('repo1', 'distributor1') mock_get_by_resource.assert_called_once_with( RepoDistributor.build_resource_tag('repo1', 'distributor1')) self.assertTrue(ret is mock_get_by_resource.return_value)
scheduler.remove(schedule_id) # Inform the importer importer_coll = RepoImporter.get_collection() repo_importer = importer_coll.find_one({'repo_id': repo_id}) if repo_importer is not None: try: importer_manager.remove_importer(repo_id) except Exception, e: _LOG.exception( 'Error received removing importer [%s] from repo [%s]' % (repo_importer['importer_type_id'], repo_id)) error_tuples.append((_('Importer Delete Error'), e.args)) # Inform all distributors distributor_coll = RepoDistributor.get_collection() repo_distributors = list(distributor_coll.find({'repo_id': repo_id})) for repo_distributor in repo_distributors: try: distributor_manager.remove_distributor(repo_id, repo_distributor['id']) except Exception, e: _LOG.exception( 'Error received removing distributor [%s] from repo [%s]' % (repo_distributor['id'], repo_id)) error_tuples.append((_('Distributor Delete Error'), e.args)) # Delete the repository working directory repo_working_dir = common_utils.repository_working_dir(repo_id, mkdir=False) if os.path.exists(repo_working_dir):
def add_distributor(repo_id, distributor_type_id, repo_plugin_config, auto_publish,
                    distributor_id=None):
    """
    Adds an association from the given repository to a distributor. The
    association will be tracked through the distributor_id; each distributor
    on a given repository must have a unique ID. If this is not specified,
    one will be generated. If a distributor already exists on the repo for
    the given ID, the existing one will be removed and replaced with the
    newly configured one.

    :param repo_id: identifies the repo
    :type repo_id: str
    :param distributor_type_id: identifies the distributor; must correspond to
           a distributor loaded at server startup
    :type distributor_type_id: str
    :param repo_plugin_config: configuration the repo will use with this
           distributor; may be None
    :type repo_plugin_config: dict
    :param auto_publish: if true, this distributor will be invoked at the end
           of every sync
    :type auto_publish: bool
    :param distributor_id: unique ID to refer to this distributor for this repo
    :type distributor_id: str
    :return: ID assigned to the distributor (only valid in conjunction with the repo)
    :raise MissingResource: if the given repo_id does not refer to a valid repo
    :raise InvalidValue: if the distributor ID is provided and unacceptable
    :raise InvalidDistributorConfiguration: if the distributor plugin does not
           accept the given configuration
    """
    distributor_coll = RepoDistributor.get_collection()
    # Raises MissingResource if the repo doesn't exist (mongoengine-backed lookup).
    repo_obj = model.Repository.objects.get_repo_or_missing_resource(repo_id)

    if not plugin_api.is_valid_distributor(distributor_type_id):
        raise InvalidValue(['distributor_type_id'])

    # Determine the ID for this distributor on this repo; will be
    # unique for all distributors on this repository but not globally
    if distributor_id is None:
        distributor_id = str(uuid.uuid4())
    else:
        # Validate if one was passed in
        if not is_distributor_id_valid(distributor_id):
            raise InvalidValue(['distributor_id'])

    distributor_instance, plugin_config = plugin_api.get_distributor_by_id(
        distributor_type_id)

    # Convention is that a value of None means unset. Remove any keys that
    # are explicitly set to None so the plugin will default them.
    if repo_plugin_config is not None:
        clean_config = dict([(k, v) for k, v in repo_plugin_config.items()
                             if v is not None])
    else:
        clean_config = None

    # Let the distributor plugin verify the configuration
    call_config = PluginCallConfiguration(plugin_config, clean_config)
    config_conduit = RepoConfigConduit(distributor_type_id)
    transfer_repo = repo_obj.to_transfer_repo()

    result = distributor_instance.validate_config(transfer_repo, call_config,
                                                  config_conduit)

    # For backward compatibility with plugins that don't yet return the tuple
    if isinstance(result, bool):
        valid_config = result
        message = None
    else:
        valid_config, message = result

    if not valid_config:
        raise PulpDataException(message)

    # Remove the old distributor if it exists (replace semantics).
    try:
        RepoDistributorManager.remove_distributor(repo_id, distributor_id)
    except MissingResource:
        pass  # if it didn't exist, no problem

    # Let the distributor plugin initialize the repository
    try:
        distributor_instance.distributor_added(transfer_repo, call_config)
    except Exception:
        msg = _('Error initializing distributor [%(d)s] for repo [%(r)s]')
        msg = msg % {'d': distributor_type_id, 'r': repo_id}
        _logger.exception(msg)
        # Python 2 three-expression raise: preserve the original traceback.
        raise PulpExecutionException(), None, sys.exc_info()[2]

    # Database Update — only after validation and initialization succeeded.
    distributor = RepoDistributor(repo_id, distributor_id, distributor_type_id,
                                  clean_config, auto_publish)
    distributor_coll.save(distributor, safe=True)

    return distributor
def test_add_distributor(self): """ Tests adding a distributor to a new repo. """ # Setup self.repo_manager.create_repo('test_me') config = {'key1': 'value1', 'key2': None} # Test added = self.distributor_manager.add_distributor( 'test_me', 'mock-distributor', config, True, distributor_id='my_dist') # Verify expected_config = {'key1': 'value1'} # Database all_distributors = list(RepoDistributor.get_collection().find()) self.assertEqual(1, len(all_distributors)) self.assertEqual('my_dist', all_distributors[0]['id']) self.assertEqual('mock-distributor', all_distributors[0]['distributor_type_id']) self.assertEqual('test_me', all_distributors[0]['repo_id']) self.assertEqual(expected_config, all_distributors[0]['config']) self.assertTrue(all_distributors[0]['auto_publish']) # Returned Value self.assertEqual('my_dist', added['id']) self.assertEqual('mock-distributor', added['distributor_type_id']) self.assertEqual('test_me', added['repo_id']) self.assertEqual(expected_config, added['config']) self.assertTrue(added['auto_publish']) # Plugin - Validate Config self.assertEqual( 1, mock_plugins.MOCK_DISTRIBUTOR.validate_config.call_count) call_repo = mock_plugins.MOCK_DISTRIBUTOR.validate_config.call_args[0][ 0] call_config = mock_plugins.MOCK_DISTRIBUTOR.validate_config.call_args[ 0][1] self.assertTrue(isinstance(call_repo, Repository)) self.assertEqual('test_me', call_repo.id) self.assertTrue(isinstance(call_config, PluginCallConfiguration)) self.assertTrue(call_config.plugin_config is not None) self.assertEqual(call_config.repo_plugin_config, expected_config) # Plugin - Distributor Added self.assertEqual( 1, mock_plugins.MOCK_DISTRIBUTOR.distributor_added.call_count) call_repo = mock_plugins.MOCK_DISTRIBUTOR.validate_config.call_args[0][ 0] call_config = mock_plugins.MOCK_DISTRIBUTOR.validate_config.call_args[ 0][1] self.assertTrue(isinstance(call_repo, Repository)) self.assertTrue(isinstance(call_config, PluginCallConfiguration))
def publish(repo_id, distributor_id, publish_config_override=None): """ Requests the given distributor publish the repository it is configured on. The publish operation is executed synchronously in the caller's thread and will block until it is completed. The caller must take the necessary steps to address the fact that a publish call may be time intensive. @param repo_id: identifies the repo being published @type repo_id: str @param distributor_id: identifies the repo's distributor to publish @type distributor_id: str @param publish_config_override: optional config values to use for this publish call only @type publish_config_override: dict, None :return: report of the details of the publish :rtype: pulp.server.db.model.repository.RepoPublishResult """ repo_coll = Repo.get_collection() distributor_coll = RepoDistributor.get_collection() # Validation repo = repo_coll.find_one({'id': repo_id}) if repo is None: raise MissingResource(repo_id) repo_distributor = distributor_coll.find_one({ 'repo_id': repo_id, 'id': distributor_id }) if repo_distributor is None: raise MissingResource(repository=repo_id, distributor=distributor_id) distributor_instance, distributor_config = RepoPublishManager.\ _get_distributor_instance_and_config(repo_id, distributor_id) # Assemble the data needed for the publish conduit = RepoPublishConduit(repo_id, distributor_id) call_config = PluginCallConfiguration(distributor_config, repo_distributor['config'], publish_config_override) transfer_repo = common_utils.to_transfer_repo(repo) transfer_repo.working_dir = common_utils.distributor_working_dir( repo_distributor['distributor_type_id'], repo_id, mkdir=True) # Fire events describing the publish state fire_manager = manager_factory.event_fire_manager() fire_manager.fire_repo_publish_started(repo_id, distributor_id) result = RepoPublishManager._do_publish(repo, distributor_id, distributor_instance, transfer_repo, conduit, call_config) fire_manager.fire_repo_publish_finished(result) return result
def test_publish(self, mock_finished, mock_started):
    """
    Happy-path publish: only the requested distributor is invoked with the
    expected config stack, last_publish is stamped, a successful history
    entry is written, and the start/finish events fire once each.
    """
    # Setup: one repo with two distributors; we publish through the first only.
    dist_config = {'foo': 'bar'}
    self.repo_manager.create_repo('repo-1')
    self.distributor_manager.add_distributor('repo-1', 'mock-distributor', dist_config,
                                             False, distributor_id='dist-1')
    self.distributor_manager.add_distributor('repo-1', 'mock-distributor-2', dist_config,
                                             False, distributor_id='dist-2')

    # Test
    self.publish_manager.publish('repo-1', 'dist-1', None)

    # Verify: last_publish was stamped on the distributor document.
    dist_doc = RepoDistributor.get_collection().find_one(
        {'repo_id': 'repo-1', 'id': 'dist-1'})
    self.assertTrue(dist_doc['last_publish'] is not None)
    self.assertTrue(assert_last_sync_time(dist_doc['last_publish']))

    # Exactly one successful history record was written.
    history = list(RepoPublishResult.get_collection().find({'repo_id': 'repo-1'}))
    self.assertEqual(1, len(history))
    record = history[0]
    self.assertEqual('repo-1', record['repo_id'])
    self.assertEqual('dist-1', record['distributor_id'])
    self.assertEqual('mock-distributor', record['distributor_type_id'])
    self.assertEqual(RepoPublishResult.RESULT_SUCCESS, record['result'])
    for populated_field in ('started', 'completed', 'summary', 'details'):
        self.assertTrue(record[populated_field] is not None)
    for empty_field in ('error_message', 'exception', 'traceback'):
        self.assertTrue(record[empty_field] is None)

    # The correct distributor was driven with the expected configuration layers.
    publish_args = mock_plugins.MOCK_DISTRIBUTOR.publish_repo.call_args[0]
    self.assertEqual('repo-1', publish_args[0].id)
    self.assertTrue(publish_args[1] is not None)
    self.assertEqual({}, publish_args[2].plugin_config)
    self.assertEqual(dist_config, publish_args[2].repo_plugin_config)
    self.assertEqual({}, publish_args[2].override_config)
    self.assertEqual(0, mock_plugins.MOCK_DISTRIBUTOR_2.publish_repo.call_count)

    # Lifecycle events fired once each with the right repo.
    self.assertEqual(1, mock_started.call_count)
    self.assertEqual('repo-1', mock_started.call_args[0][0])
    self.assertEqual(1, mock_finished.call_count)
    self.assertEqual('repo-1', mock_finished.call_args[0][0]['repo_id'])
def clean(self): super(RepoPublishConduitTests, self).clean() mock_plugins.reset() model.Repository.drop_collection() RepoDistributor.get_collection().remove()
def clean(self): super(RepoSyncManagerTests, self).clean() Repo.get_collection().remove() RepoDistributor.get_collection().remove() RepoPublishResult.get_collection().remove()
def add_distributor(repo_id, distributor_type_id, repo_plugin_config, auto_publish,
                    distributor_id=None):
    """
    Adds an association from the given repository to a distributor. The
    association will be tracked through the distributor_id; each distributor
    on a given repository must have a unique ID. If this is not specified,
    one will be generated. If a distributor already exists on the repo for
    the given ID, the existing one will be removed and replaced with the
    newly configured one.

    NOTE(review): as visible in this excerpt the function ends inside the
    validation error handler — the ``valid_config`` check, plugin
    initialization, and database save appear to be truncated here; confirm
    against the full module.

    :param repo_id: identifies the repo
    :type repo_id: str
    :param distributor_type_id: identifies the distributor; must correspond to
           a distributor loaded at server startup
    :type distributor_type_id: str
    :param repo_plugin_config: configuration the repo will use with this
           distributor; may be None
    :type repo_plugin_config: dict
    :param auto_publish: if true, this distributor will be invoked at the end
           of every sync
    :type auto_publish: bool
    :param distributor_id: unique ID to refer to this distributor for this repo
    :type distributor_id: str
    :return: ID assigned to the distributor (only valid in conjunction with the repo)
    :raise MissingResource: if the given repo_id does not refer to a valid repo
    :raise InvalidValue: if the distributor ID is provided and unacceptable
    :raise InvalidDistributorConfiguration: if the distributor plugin does not
           accept the given configuration
    """
    repo_coll = Repo.get_collection()
    distributor_coll = RepoDistributor.get_collection()

    # Validation
    repo = repo_coll.find_one({'id': repo_id})
    if repo is None:
        raise MissingResource(repository=repo_id)
    if not plugin_api.is_valid_distributor(distributor_type_id):
        raise InvalidValue(['distributor_type_id'])

    # Determine the ID for this distributor on this repo; will be
    # unique for all distributors on this repository but not globally
    if distributor_id is None:
        distributor_id = str(uuid.uuid4())
    else:
        # Validate if one was passed in
        if not is_distributor_id_valid(distributor_id):
            raise InvalidValue(['distributor_id'])

    distributor_instance, plugin_config = plugin_api.get_distributor_by_id(
        distributor_type_id)

    # Convention is that a value of None means unset. Remove any keys that
    # are explicitly set to None so the plugin will default them.
    if repo_plugin_config is not None:
        clean_config = dict([(k, v) for k, v in repo_plugin_config.items()
                             if v is not None])
    else:
        clean_config = None

    # Let the distributor plugin verify the configuration
    call_config = PluginCallConfiguration(plugin_config, clean_config)
    transfer_repo = common_utils.to_transfer_repo(repo)
    transfer_repo.working_dir = common_utils.distributor_working_dir(
        distributor_type_id, repo_id)
    config_conduit = RepoConfigConduit(distributor_type_id)

    try:
        result = distributor_instance.validate_config(
            transfer_repo, call_config, config_conduit)
        # For backward compatibility with plugins that don't yet return the tuple
        if isinstance(result, bool):
            valid_config = result
            message = None
        else:
            valid_config, message = result
    except Exception, e:
        logger.exception(
            'Exception received from distributor [%s] while validating config'
            % distributor_type_id)
        # Python 2 three-expression raise: preserve the original traceback.
        raise PulpDataException(e.args), None, sys.exc_info()[2]
self.distributor_manager.add_distributor('gonna-bail', 'mock-distributor', {}, False, distributor_id='bad-dist') # Test try: self.publish_manager.publish('gonna-bail', 'bad-dist') self.fail('Expected exception was not raised') except publish_manager.PulpExecutionException, e: print(e) # for coverage # Verify repo_distributor = RepoDistributor.get_collection().find_one({ 'repo_id': 'gonna-bail', 'id': 'bad-dist' }) self.assertTrue(repo_distributor is not None) self.assertTrue(assert_last_sync_time( repo_distributor['last_publish'])) entries = list(RepoPublishResult.get_collection().find( {'repo_id': 'gonna-bail'})) self.assertEqual(1, len(entries)) self.assertEqual('gonna-bail', entries[0]['repo_id']) self.assertEqual('bad-dist', entries[0]['distributor_id']) self.assertEqual('mock-distributor', entries[0]['distributor_type_id']) self.assertTrue(entries[0]['started'] is not None) self.assertTrue(entries[0]['completed'] is not None)
def clean(self): super(RepoQueryManagerTests, self).clean() Repo.get_collection().remove() RepoImporter.get_collection().remove() RepoDistributor.get_collection().remove()
def update_distributor_config(repo_id, distributor_id, distributor_config, auto_publish=None):
    """
    Attempts to update the saved configuration for the given distributor.

    The supplied config is a *delta*: keys explicitly set to None mean "unset
    this property" and are removed from the stored configuration; the
    remaining keys are merged over it. The distributor plugin is asked to
    validate the merged result; if it rejects the changes, the stored
    configuration remains unchanged.

    Fix: the delta is copied before processing so the caller's dict is no
    longer mutated as a side effect (the previous implementation popped
    None-valued keys out of the caller's ``distributor_config``).

    :param repo_id: identifies the repo
    :type repo_id: str
    :param distributor_id: identifies the distributor on the repo
    :type distributor_id: str
    :param distributor_config: new configuration values to use
    :type distributor_config: dict
    :param auto_publish: If true, this distributor is used automatically during a sync operation
    :type auto_publish: bool
    :return: the updated distributor
    :rtype: dict
    :raise MissingResource: if the given repo or distributor doesn't exist
    :raise PulpDataException: if the plugin rejects the given changes
    :raise InvalidValue: if auto_publish is provided but is not a bool
    """
    distributor_coll = RepoDistributor.get_collection()
    # Raises MissingResource if the repo doesn't exist.
    repo_obj = model.Repository.objects.get_repo_or_missing_resource(repo_id)

    repo_distributor = distributor_coll.find_one({'repo_id': repo_id, 'id': distributor_id})
    if repo_distributor is None:
        raise MissingResource(distributor=distributor_id)

    distributor_type_id = repo_distributor['distributor_type_id']
    distributor_instance, plugin_config = plugin_api.get_distributor_by_id(distributor_type_id)

    # The supplied config is a delta of changes to make to the existing config.
    # The plugin expects a full configuration, so we apply those changes to
    # the original config and pass that to the plugin's validate method.
    # Work on a copy so the caller's dict is never mutated.
    config_delta = dict(distributor_config)
    merged_config = dict(repo_distributor['config'])

    # The convention is that None in an update means "remove the value and
    # fall back to the default". Drop such keys from both the stored config
    # and the delta.
    for key in [k for k, v in config_delta.items() if v is None]:
        merged_config.pop(key, None)
        config_delta.pop(key)

    # Whatever is left over are the changed/added values, so merge them in.
    merged_config.update(config_delta)

    # Let the distributor plugin verify the merged configuration.
    call_config = PluginCallConfiguration(plugin_config, merged_config)
    transfer_repo = repo_obj.to_transfer_repo()
    config_conduit = RepoConfigConduit(distributor_type_id)

    result = distributor_instance.validate_config(transfer_repo, call_config, config_conduit)

    # For backward compatibility with plugins that don't yet return the tuple
    if isinstance(result, bool):
        valid_config = result
        message = None
    else:
        valid_config, message = result

    if not valid_config:
        raise PulpDataException(message)

    # Confirm that the auto_publish value is sane before updating the value, if it exists
    if auto_publish is not None:
        if not isinstance(auto_publish, bool):
            raise InvalidValue(['auto_publish'])
        repo_distributor['auto_publish'] = auto_publish

    # If we got this far, the new config is valid, so update the database
    repo_distributor['config'] = merged_config
    distributor_coll.save(repo_distributor, safe=True)

    return repo_distributor