def _validate_consumer_repo(consumer_id, repo_id, distributor_id):
    """
    Check that the given consumer, repository, and distributor all exist.

    Rather than raising an exception, this method returns a dictionary of
    missing values and allows the caller to decide what exception to raise.

    :param consumer_id: The consumer id to validate
    :type consumer_id: str
    :param repo_id: The repository id to validate
    :type repo_id: str
    :param distributor_id: The distributor_id to validate
    :type distributor_id: str
    :return: A dictionary containing the missing values, or an empty dict
             if everything is valid
    :rtype: dict
    """
    missing = {}
    # Each lookup raises MissingResource when the resource is absent; record
    # the offending id instead of propagating. Order matches the original:
    # consumer, then repo, then distributor.
    lookups = [
        ('consumer_id', consumer_id,
         lambda: factory.consumer_manager().get_consumer(consumer_id)),
        ('repo_id', repo_id,
         lambda: model.Repository.objects.get_repo_or_missing_resource(repo_id)),
        ('distributor_id', distributor_id,
         lambda: factory.repo_distributor_manager().get_distributor(repo_id, distributor_id)),
    ]
    for key, value, lookup in lookups:
        try:
            lookup()
        except MissingResource:
            missing[key] = value
    return missing
def _validate_consumer_repo(consumer_id, repo_id, distributor_id):
    """
    Validate that the given consumer, repository, and distributor are present.

    Rather than raising an exception, this method returns a dictionary of
    missing values and allows the caller to decide what exception to raise.

    :param consumer_id: The consumer id to validate
    :type consumer_id: str
    :param repo_id: The repository id to validate
    :type repo_id: str
    :param distributor_id: The distributor_id to validate
    :type distributor_id: str
    :return: A dictionary containing the missing values, or an empty dict if
             everything is valid
    :rtype: dict
    """
    missing_values = {}
    # Each manager lookup raises MissingResource when the resource does not
    # exist; collect the id under the matching key instead of propagating.
    try:
        factory.consumer_manager().get_consumer(consumer_id)
    except MissingResource:
        missing_values['consumer_id'] = consumer_id
    try:
        model.Repository.objects.get_repo_or_missing_resource(repo_id)
    except MissingResource:
        missing_values['repo_id'] = repo_id
    try:
        factory.repo_distributor_manager().get_distributor(repo_id, distributor_id)
    except MissingResource:
        missing_values['distributor_id'] = distributor_id
    return missing_values
def POST(self, consumer_id): """ Create a bind association between the specified consumer by id included in the URL path and a repo-distributor specified in the POST body: {repo_id:<str>, distributor_id:<str>}. Designed to be idempotent so only MissingResource is expected to be raised by manager. @param consumer_id: The consumer to bind. @type consumer_id: str @return: The list of call_reports @rtype: list """ # validate consumer consumer_manager = managers.consumer_manager() consumer_manager.get_consumer(consumer_id) # get other options and validate them body = self.params() repo_id = body.get('repo_id') distributor_id = body.get('distributor_id') binding_config = body.get('binding_config', None) options = body.get('options', {}) notify_agent = body.get('notify_agent', True) managers.repo_query_manager().get_repository(repo_id) managers.repo_distributor_manager().get_distributor( repo_id, distributor_id) # bind call_requests = bind_itinerary(consumer_id, repo_id, distributor_id, notify_agent, binding_config, options) execution.execute_multiple(call_requests)
def POST(self, consumer_id): """ Create a bind association between the specified consumer by id included in the URL path and a repo-distributor specified in the POST body: {repo_id:<str>, distributor_id:<str>}. Designed to be idempotent so only MissingResource is expected to be raised by manager. @param consumer_id: The consumer to bind. @type consumer_id: str @return: The list of call_reports @rtype: list """ # validate consumer consumer_manager = managers.consumer_manager() consumer_manager.get_consumer(consumer_id) # get other options and validate them body = self.params() repo_id = body.get('repo_id') distributor_id = body.get('distributor_id') binding_config = body.get('binding_config', None) options = body.get('options', {}) notify_agent = body.get('notify_agent', True) managers.repo_query_manager().get_repository(repo_id) managers.repo_distributor_manager().get_distributor(repo_id, distributor_id) # bind call_requests = bind_itinerary(consumer_id, repo_id, distributor_id, notify_agent, binding_config, options) execution.execute_multiple(call_requests)
def POST(self, repo_id, distributor_id):
    """
    Create a scheduled publish for the given repository/distributor pair.

    The request body carries the schedule parameters; any 'override_config'
    key is split out and passed as the publish options. Returns a 201 with
    the serialized schedule object.
    """
    # validate the distributor exists (raises MissingResource otherwise)
    distributor_manager = manager_factory.repo_distributor_manager()
    distributor_manager.get_distributor(repo_id, distributor_id)
    schedule_options = self.params()
    # pop() so the remaining params are pure schedule data
    publish_options = {'override_config': schedule_options.pop('override_config', {})}
    schedule_manager = manager_factory.schedule_manager()
    # read-lock the repo, update-lock its distributor while creating
    resources = {dispatch_constants.RESOURCE_REPOSITORY_TYPE:
                 {repo_id: dispatch_constants.RESOURCE_READ_OPERATION},
                 dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE:
                 {distributor_id: dispatch_constants.RESOURCE_UPDATE_OPERATION}}
    weight = pulp_config.config.getint('tasks', 'create_weight')
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE,
                         distributor_id),
            action_tag('create_publish_schedule')]
    call_request = CallRequest(schedule_manager.create_publish_schedule,
                               [repo_id, distributor_id, publish_options, schedule_options],
                               resources=resources,
                               weight=weight,
                               tags=tags,
                               archive=True)
    # synchronous execution: we need the new schedule id to build the response
    schedule_id = execution.execute_sync(call_request)
    scheduler = dispatch_factory.scheduler()
    schedule = scheduler.get(schedule_id)
    obj = serialization.dispatch.scheduled_publish_obj(schedule)
    obj.update(serialization.link.child_link_obj(schedule_id))
    return self.created(obj['_href'], obj)
def verify_group_resources(group_id, repo_id, distributor_id):
    """
    Confirm the group, repository, and distributor exist.

    :param group_id: The consumer group id to verify the existence of
    :type group_id: str
    :param repo_id: The repository id to confirm the existence of
    :type repo_id: str
    :param distributor_id: The distributor id to confirm the existence of on
                           the repository
    :type distributor_id: str
    :return: A dictionary of the missing resources
    :rtype: dict
    """
    missing = {}
    group_query = factory.consumer_group_query_manager()
    repo_query = factory.repo_query_manager()
    dist_manager = factory.repo_distributor_manager()

    # The group and distributor lookups signal absence by raising
    # MissingResource; the repo lookup signals absence by returning None.
    try:
        group_query.get_group(group_id)
    except pulp_exceptions.MissingResource:
        missing['group_id'] = group_id

    if repo_query.find_by_id(repo_id) is None:
        missing['repo_id'] = repo_id

    try:
        dist_manager.get_distributor(repo_id, distributor_id)
    except pulp_exceptions.MissingResource:
        missing['distributor_id'] = distributor_id

    return missing
def _process_repos(repos, importers=False, distributors=False):
    """
    Apply standard processing to a collection of repositories being returned
    to a client. Adds the object link and optionally adds related importers
    and distributors.

    :param repos: collection of repositories
    :type repos: list, tuple
    :param importers: if True, adds related importers under the attribute
                      "importers".
    :type importers: bool
    :param distributors: if True, adds related distributors under the
                         attribute "distributors"
    :type distributors: bool
    :return: the same list that was passed in, just for convenience. The list
             itself is not modified- only its members are modified in-place.
    :rtype: list of Repo instances
    """
    if importers:
        _merge_related_objects(
            'importers', manager_factory.repo_importer_manager(), repos)
    if distributors:
        _merge_related_objects(
            'distributors', manager_factory.repo_distributor_manager(), repos)
    for repo in repos:
        # add the canonical link and make dates JSON-friendly
        repo['_href'] = reverse('repo_resource', kwargs={'repo_id': repo['id']})
        _convert_repo_dates_to_strings(repo)

        # Remove internally used scratchpad from repo details
        if 'scratchpad' in repo:
            del repo['scratchpad']

    return repos
def delete(self, request, repo_id, distributor_id): """ Disassociate the requested distributor. :param request: WSGI request object :type request: django.core.handlers.wsgi.WSGIRequest :param repo_id: The id of the repository the to disassociate from :type repo_id: str :param repo_id: The id of the distributor to disassociate :type repo_id: str :raises pulp_exceptions.OperationPostponed: dispatch a task """ # validate resources manager = manager_factory.repo_distributor_manager() manager.get_distributor(repo_id, distributor_id) task_tags = [ tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id), tags.resource_tag(tags.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id), tags.action_tag('remove_distributor') ] async_result = repo_tasks.distributor_delete.apply_async_with_reservation( tags.RESOURCE_REPOSITORY_TYPE, repo_id, [repo_id, distributor_id], tags=task_tags) raise pulp_exceptions.OperationPostponed(async_result)
def test_update_repo_and_plugins_partial(self): """ Tests no errors are encountered when only updating some of the possible fields. """ # Setup self.manager.create_repo('repo-1', 'Original', 'Original Description') importer_manager = manager_factory.repo_importer_manager() distributor_manager = manager_factory.repo_distributor_manager() importer_manager.set_importer('repo-1', 'mock-importer', {'key-i1': 'orig-1'}) distributor_manager.add_distributor('repo-1', 'mock-distributor', {'key-d1': 'orig-1'}, True, distributor_id='dist-1') # Test result = self.manager.update_repo_and_plugins('repo-1', None, None, None) repo = result.return_value # Verify self.assertEqual(repo['display_name'], 'Original') importer = importer_manager.get_importer('repo-1') self.assertEqual(importer['config'], {'key-i1': 'orig-1'}) dist_1 = distributor_manager.get_distributor('repo-1', 'dist-1') self.assertEqual(dist_1['config'], {'key-d1': 'orig-1'})
def distributor_delete(repo_id, distributor_id): """ Get the itinerary for deleting a repository distributor. 1. Delete the distributor on the sever. 2. Unbind any bound consumers. :param repo_id: A repository ID. :type repo_id: str :param distributor_id: A distributor id :type distributor_id: str :return: Any errors that may have occurred and the list of tasks spawned for each consumer :rtype TaskResult """ # delete distributor manager = managers.repo_distributor_manager() manager.remove_distributor(repo_id, distributor_id) # append unbind itineraries foreach bound consumer unbind_errors = [] additional_tasks = [] options = {} manager = managers.consumer_bind_manager() for bind in manager.find_by_distributor(repo_id, distributor_id): try: report = consumer.unbind(bind["consumer_id"], bind["repo_id"], bind["distributor_id"], options) if report: additional_tasks.extend(report.spawned_tasks) except Exception, e: unbind_errors.append(e)
def _bindings(bindings):
    """
    Build the bindings needed by the agent.

    The returned bindings will be the payload created by the appropriate
    distributor.

    :param bindings: a list of binding object retrieved from the database
    :type bindings: list
    :return: list of binding objects to send to the agent
    :rtype: list
    """
    agent_bindings = []
    # The distributor manager does not depend on the individual binding, so
    # fetch it once instead of once per loop iteration.
    manager = managers.repo_distributor_manager()
    for binding in bindings:
        repo_id = binding['repo_id']
        distributor_id = binding['distributor_id']
        distributor = manager.get_distributor(repo_id, distributor_id)
        # the payload is distributor-specific and built from the stored config
        details = manager.create_bind_payload(repo_id,
                                              distributor_id,
                                              binding['binding_config'])
        type_id = distributor['distributor_type_id']
        agent_binding = dict(type_id=type_id, repo_id=repo_id, details=details)
        agent_bindings.append(agent_binding)
    return agent_bindings
def populate(self):
    """Create the distributor under test and register every consumer."""
    distributor_config = {"key1": "value1", "key2": None}
    factory.repo_distributor_manager().add_distributor(
        self.REPO_ID,
        "mock-distributor",
        distributor_config,
        True,
        distributor_id=self.DISTRIBUTOR_ID)
    consumer_manager = factory.consumer_manager()
    for consumer_id in self.ALL_CONSUMERS:
        consumer_manager.register(consumer_id)
def test_delete_with_plugins(self): """ Tests that deleting a repo that has importers and distributors configured deletes them as well. """ # Setup self.manager.create_repo('doomed') importer_manager = manager_factory.repo_importer_manager() distributor_manager = manager_factory.repo_distributor_manager() importer_manager.set_importer('doomed', 'mock-importer', {}) distributor_manager.add_distributor('doomed', 'mock-distributor', {}, True, distributor_id='dist-1') distributor_manager.add_distributor('doomed', 'mock-distributor', {}, True, distributor_id='dist-2') self.assertEqual(1, len(list(RepoImporter.get_collection().find({'repo_id' : 'doomed'})))) self.assertEqual(2, len(list(RepoDistributor.get_collection().find({'repo_id' : 'doomed'})))) # Test self.manager.delete_repo('doomed') # Verify self.assertEqual(0, len(list(Repo.get_collection().find()))) self.assertEqual(0, len(list(RepoImporter.get_collection().find({'repo_id' : 'doomed'})))) self.assertEqual(0, len(list(RepoDistributor.get_collection().find({'repo_id' : 'doomed'})))) self.assertEqual(1, mock_plugins.MOCK_IMPORTER.importer_removed.call_count) self.assertEqual(2, mock_plugins.MOCK_DISTRIBUTOR.distributor_removed.call_count) repo_working_dir = common_utils.repository_working_dir('doomed', mkdir=False) self.assertTrue(not os.path.exists(repo_working_dir))
def test_update_repo_and_plugins_partial(self): """ Tests no errors are encountered when only updating some of the possible fields. """ # Setup self.manager.create_repo('repo-1', 'Original', 'Original Description') importer_manager = manager_factory.repo_importer_manager() distributor_manager = manager_factory.repo_distributor_manager() importer_manager.set_importer('repo-1', 'mock-importer', {'key-i1': 'orig-1'}) distributor_manager.add_distributor('repo-1', 'mock-distributor', {'key-d1' : 'orig-1'}, True, distributor_id='dist-1') # Test repo = self.manager.update_repo_and_plugins('repo-1', None, None, None) # Verify self.assertEqual(repo['display_name'], 'Original') importer = importer_manager.get_importer('repo-1') self.assertEqual(importer['config'], {'key-i1' : 'orig-1'}) dist_1 = distributor_manager.get_distributor('repo-1', 'dist-1') self.assertEqual(dist_1['config'], {'key-d1' : 'orig-1'})
def DELETE(self, repo_id, distributor_id, schedule_id):
    """
    Delete a scheduled publish from the given repository/distributor.

    Raises MissingResource when the schedule is not associated with the
    distributor; otherwise dispatches the delete and returns its result.
    """
    # reject schedule ids that do not belong to this distributor
    distributor_manager = manager_factory.repo_distributor_manager()
    schedule_list = distributor_manager.list_publish_schedules(
        repo_id, distributor_id)
    if schedule_id not in schedule_list:
        raise exceptions.MissingResource(repo=repo_id,
                                         distributor=distributor_id,
                                         publish_schedule=schedule_id)

    schedule_manager = manager_factory.schedule_manager()
    tags = [
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
        resource_tag(
            dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE,
            distributor_id),
        resource_tag(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id),
        action_tag('delete_publish_schedule')
    ]
    call_request = CallRequest(schedule_manager.delete_publish_schedule,
                               [repo_id, distributor_id, schedule_id],
                               tags=tags,
                               archive=True)
    # declare resource usage: read repo, update distributor, delete schedule
    call_request.reads_resource(
        dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    call_request.updates_resource(
        dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id)
    call_request.deletes_resource(
        dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id)
    result = execution.execute(call_request)
    return self.ok(result)
def DELETE(self, repo_id, distributor_id):
    """Remove the distributor from the repository via the delete itinerary."""
    # get_distributor() raises MissingResource when either id is unknown,
    # so nothing is queued for absent resources.
    manager_factory.repo_distributor_manager().get_distributor(
        repo_id, distributor_id)
    requests = distributor_delete_itinerary(repo_id, distributor_id)
    execution.execute_multiple(requests)
def test_update_repo_and_plugins(self, distributor_update, mock_get_worker_for_reservation):
    """
    Tests the aggregate call to update a repo and its plugins.
    """
    # give the reservation machinery a worker so the distributor update can
    # be dispatched
    mock_get_worker_for_reservation.return_value = Worker(
        'some_queue', datetime.datetime.now())
    self.manager.create_repo('repo-1', 'Original', 'Original Description')

    importer_manager = manager_factory.repo_importer_manager()
    distributor_manager = manager_factory.repo_distributor_manager()

    importer_manager.set_importer('repo-1', 'mock-importer', {'key-i1': 'orig-1'})
    distributor_manager.add_distributor('repo-1', 'mock-distributor',
                                        {'key-d1': 'orig-1'}, True,
                                        distributor_id='dist-1')
    distributor_manager.add_distributor('repo-1', 'mock-distributor',
                                        {'key-d2': 'orig-2'}, True,
                                        distributor_id='dist-2')

    # Test
    repo_delta = {'display_name': 'Updated'}
    new_importer_config = {'key-i1': 'updated-1', 'key-i2': 'new-1'}
    new_distributor_configs = {
        'dist-1': {
            'key-d1': 'updated-1'
        },
    }  # only update one of the two distributors

    result = self.manager.update_repo_and_plugins('repo-1', repo_delta,
                                                  new_importer_config,
                                                  new_distributor_configs)

    self.assertTrue(isinstance(result, TaskResult))
    self.assertEquals(None, result.error)
    repo = result.return_value

    # Verify: delta applied, untouched fields and dist-2 config preserved
    self.assertEqual(repo['id'], 'repo-1')
    self.assertEqual(repo['display_name'], 'Updated')
    self.assertEqual(repo['description'], 'Original Description')

    importer = importer_manager.get_importer('repo-1')
    self.assertEqual(importer['config'], new_importer_config)

    dist_1 = distributor_manager.get_distributor('repo-1', 'dist-1')
    self.assertEqual(dist_1['config'], new_distributor_configs['dist-1'])

    dist_2 = distributor_manager.get_distributor('repo-1', 'dist-2')
    self.assertEqual(dist_2['config'], {'key-d2': 'orig-2'})

    # There should have been a spawned task for the new distributor config
    expected_task_id = dispatch.TaskStatus.get_collection().find_one(
        {'tags': 'pulp:repository_distributor:dist-1'})['task_id']
    self.assertEqual(result.spawned_tasks, [{'task_id': expected_task_id}])
def delete(repo_id, distributor_id):
    """
    Get the itinerary for deleting a repository distributor.
      1. Delete the distributor on the sever.
      2. Unbind any bound consumers.

    :param repo_id: A repository ID.
    :type repo_id: str
    :param distributor_id: A distributor id
    :type distributor_id: str
    :return: Any errors that may have occurred and the list of tasks spawned
             for each consumer
    :rtype TaskResult
    """
    manager = managers.repo_distributor_manager()
    manager.remove_distributor(repo_id, distributor_id)

    # append unbind itineraries foreach bound consumer
    unbind_errors = []
    additional_tasks = []
    options = {}
    manager = managers.consumer_bind_manager()
    for bind in manager.find_by_distributor(repo_id, distributor_id):
        try:
            report = consumer.unbind(bind['consumer_id'],
                                     bind['repo_id'],
                                     bind['distributor_id'],
                                     options)
            if report:
                additional_tasks.extend(report.spawned_tasks)
        # best-effort: record the failure and keep unbinding other consumers
        except Exception, e:
            unbind_errors.append(e)
    # NOTE(review): the docstring promises a TaskResult, but this span never
    # returns unbind_errors/additional_tasks — confirm whether the function
    # continues beyond this chunk.
def _process_repos(repos, importers=False, distributors=False):
    """
    Apply standard processing to a collection of repositories being returned
    to a client. Adds the object link and optionally adds related importers
    and distributors.

    @param repos: collection of repositories
    @type repos: list, tuple
    @param importers: iff True, adds related importers under the attribute
                      "importers".
    @type importers: bool
    @param distributors: iff True, adds related distributors under the
                         attribute "distributors".
    @type distributors: bool
    @return the same list that was passed in, just for convenience. The list
            itself is not modified- only its members are modified in-place.
    @rtype list of Repo instances
    """
    if importers:
        _merge_related_objects(
            'importers', manager_factory.repo_importer_manager(), repos)
    if distributors:
        _merge_related_objects(
            'distributors', manager_factory.repo_distributor_manager(), repos)
    for repo in repos:
        # attach the search-safe object link to each repo in-place
        repo.update(serialization.link.search_safe_link_obj(repo['id']))
    return repos
def create_publish_schedule(self, repo_id, distributor_id, publish_options, schedule_data): """ Create a new scheduled publish for the given repository and distributor. @param repo_id: @param distributor_id: @param publish_options: @param schedule_data: @return: """ # validate the input self._validate_distributor(repo_id, distributor_id) schedule_utils.validate_keys(publish_options, _PUBLISH_OPTION_KEYS) if 'schedule' not in schedule_data: raise pulp_exceptions.MissingValue(['schedule']) # build the publish call args = [repo_id, distributor_id] kwargs = {'overrides': publish_options['override_config']} call_request = CallRequest(publish_itinerary, args, kwargs, weight=0) # schedule the publish scheduler = dispatch_factory.scheduler() schedule_id = scheduler.add(call_request, **schedule_data) distributor_manager = managers_factory.repo_distributor_manager() distributor_manager.add_publish_schedule(repo_id, distributor_id, schedule_id) return schedule_id
def test_delete_with_plugins(self): """ Tests that deleting a repo that has importers and distributors configured deletes them as well. """ # Setup self.manager.create_repo('doomed') importer_manager = manager_factory.repo_importer_manager() distributor_manager = manager_factory.repo_distributor_manager() importer_manager.set_importer('doomed', 'mock-importer', {}) distributor_manager.add_distributor('doomed', 'mock-distributor', {}, True, distributor_id='dist-1') distributor_manager.add_distributor('doomed', 'mock-distributor', {}, True, distributor_id='dist-2') self.assertEqual(1, len(list(RepoImporter.get_collection().find({'repo_id': 'doomed'})))) self.assertEqual(2, len(list(RepoDistributor.get_collection().find({'repo_id': 'doomed'})))) # Test self.manager.delete_repo('doomed') # Verify self.assertEqual(0, len(list(Repo.get_collection().find()))) self.assertEqual(0, len(list(RepoImporter.get_collection().find({'repo_id': 'doomed'})))) self.assertEqual(0, len(list(RepoDistributor.get_collection().find({'repo_id': 'doomed'})))) self.assertEqual(1, mock_plugins.MOCK_IMPORTER.importer_removed.call_count) self.assertEqual(2, mock_plugins.MOCK_DISTRIBUTOR.distributor_removed.call_count) repo_working_dir = common_utils.repository_working_dir('doomed', mkdir=False) self.assertTrue(not os.path.exists(repo_working_dir))
def test_get_with_bindings(self): """ Test consumer with bindings. """ # Setup manager = factory.repo_manager() repo = manager.create_repo(self.REPO_ID) manager = factory.repo_distributor_manager() manager.add_distributor( self.REPO_ID, self.DISTRIBUTOR_TYPE_ID, {}, True, distributor_id=self.DISTRIBUTOR_ID) manager = factory.consumer_manager() manager.register(self.CONSUMER_ID) manager = factory.consumer_bind_manager() bind = manager.bind(self.CONSUMER_ID, self.REPO_ID, self.DISTRIBUTOR_ID) # Test params = {'bindings':True} path = '/v2/consumers/%s/' % self.CONSUMER_ID status, body = self.get(path, params=params) # Verify self.assertEqual(200, status) self.assertEqual(self.CONSUMER_ID, body['id']) self.assertTrue('_href' in body) self.assertTrue(body['_href'].endswith(path)) self.assertTrue('bindings' in body) bindings = body['bindings'] self.assertEquals(len(bindings), 1) self.assertEquals(bindings[0]['repo_id'], self.REPO_ID) self.assertEquals(bindings[0]['distributor_id'], self.DISTRIBUTOR_ID) self.assertEquals(bindings[0]['consumer_actions'], [])
def PUT(self, repo_id, distributor_id, schedule_id):
    """
    Update an existing scheduled publish.

    Any 'override_config' in the body updates the publish options; the
    remaining keys update the schedule itself. Returns the updated schedule.
    """
    # reject schedule ids that do not belong to this distributor
    distributor_manager = manager_factory.repo_distributor_manager()
    schedule_list = distributor_manager.list_publish_schedules(repo_id, distributor_id)
    if schedule_id not in schedule_list:
        raise exceptions.MissingResource(repo=repo_id,
                                         distributor=distributor_id,
                                         publish_schedule=schedule_id)

    # split the publish-option update out of the schedule update
    publish_update = {}
    schedule_update = self.params()
    if 'override_config' in schedule_update:
        publish_update['override_config'] = schedule_update.pop('override_config')

    schedule_manager = manager_factory.schedule_manager()
    # read repo + distributor, update the schedule resource
    resources = {dispatch_constants.RESOURCE_REPOSITORY_TYPE:
                 {repo_id: dispatch_constants.RESOURCE_READ_OPERATION},
                 dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE:
                 {distributor_id: dispatch_constants.RESOURCE_READ_OPERATION},
                 dispatch_constants.RESOURCE_SCHEDULE_TYPE:
                 {schedule_id: dispatch_constants.RESOURCE_UPDATE_OPERATION}}
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE,
                         distributor_id),
            resource_tag(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id),
            action_tag('update_publish_schedule')]
    call_request = CallRequest(schedule_manager.update_publish_schedule,
                               [repo_id, distributor_id, schedule_id,
                                publish_update, schedule_update],
                               resources=resources,
                               tags=tags,
                               archive=True)
    execution.execute(call_request)

    # re-read the schedule so the response reflects the applied update
    scheduler = dispatch_factory.scheduler()
    schedule = scheduler.get(schedule_id)
    obj = serialization.dispatch.scheduled_publish_obj(schedule)
    obj.update(serialization.link.current_link_obj())
    return self.ok(obj)
def test_delete_with_plugin_error(self):
    """
    Tests deleting a repo where one (or more) of the plugins raises an error.
    """
    # Setup
    self.manager.create_repo('doomed')

    importer_manager = manager_factory.repo_importer_manager()
    distributor_manager = manager_factory.repo_distributor_manager()

    importer_manager.set_importer('doomed', 'mock-importer', {})
    distributor_manager.add_distributor('doomed', 'mock-distributor', {}, True,
                                        distributor_id='dist-1')

    #    Setup both mocks to raise errors on removal
    mock_plugins.MOCK_IMPORTER.importer_removed.side_effect = Exception('Splat')
    mock_plugins.MOCK_DISTRIBUTOR.distributor_removed.side_effect = Exception('Pow')

    # Test: assertRaises replaces the try/self.fail/except-pass pattern and
    # drops the unused exception binding
    self.assertRaises(exceptions.PulpExecutionException,
                      self.manager.delete_repo, 'doomed')
def POST(self, repo_id):
    """
    Add a distributor to the repository identified in the URL path.

    Body keys: distributor_type_id, distributor_config, distributor_id
    (optional), auto_publish (defaults to False).
    """
    # Params (validation will occur in the manager)
    params = self.params()
    distributor_type = params.get('distributor_type_id', None)
    distributor_config = params.get('distributor_config', None)
    distributor_id = params.get('distributor_id', None)
    auto_publish = params.get('auto_publish', False)

    # Update the repo
    distributor_manager = manager_factory.repo_distributor_manager()

    resources = {dispatch_constants.RESOURCE_REPOSITORY_TYPE:
                 {repo_id: dispatch_constants.RESOURCE_UPDATE_OPERATION}}
    weight = pulp_config.config.getint('tasks', 'create_weight')
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            action_tag('add_distributor')]
    # only declare the distributor resource when the caller supplied an id
    if distributor_id is not None:
        resources.update({dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE:
                          {distributor_id: dispatch_constants.RESOURCE_CREATE_OPERATION}})
        tags.append(resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE,
                                 distributor_id))
    call_request = CallRequest(distributor_manager.add_distributor,
                               [repo_id, distributor_type, distributor_config,
                                auto_publish, distributor_id],
                               resources=resources,
                               weight=weight,
                               tags=tags)
    return execution.execute_created(self, call_request, distributor_id)
def _get_distributor_instance_and_config(repo_id, distributor_id):
    """
    Resolve the plugin instance and config for a repo's distributor.

    Looks up the stored distributor record to learn its type, then asks the
    plugin API for the matching (distributor, config) pair.
    """
    record = manager_factory.repo_distributor_manager().get_distributor(
        repo_id, distributor_id)
    # get_distributor_by_id returns the (instance, config) tuple directly
    return plugin_api.get_distributor_by_id(record['distributor_type_id'])
def verify(self, num_units=PluginTestBase.NUM_UNITS):
    """
    Assert the synchronized repository state: the repo exists, the importer
    points at the expected manifest URL, only the fake distributor remains,
    and every expected unit is present on disk with the right content.
    """
    # repository
    manager = managers.repo_query_manager()
    manager.get_repository(self.REPO_ID)
    # importer
    manager = managers.repo_importer_manager()
    importer = manager.get_importer(self.REPO_ID)
    manifest_url = importer['config'][constants.MANIFEST_URL_KEYWORD]
    self.assertTrue(manifest_url.endswith('%s/manifest.json.gz' % self.REPO_ID))
    # distributor: the fake one exists, the HTTP one must be gone
    manager = managers.repo_distributor_manager()
    manager.get_distributor(self.REPO_ID, FAKE_DISTRIBUTOR)
    self.assertRaises(MissingResource, manager.get_distributor,
                      self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    # check units
    manager = managers.repo_unit_association_query_manager()
    units = manager.get_units(self.REPO_ID)
    # index the units by their 'N' metadata counter for direct lookup
    units = dict([(u['metadata']['N'], u) for u in units])
    self.assertEqual(len(units), num_units)
    for n in range(0, num_units):
        unit = units[n]
        unit_id = self.UNIT_ID % n
        metadata = unit['metadata']
        # normalize doubled path separators before comparing
        storage_path = metadata['_storage_path'].replace('//', '/')
        self.assertEqual(unit['unit_type_id'], self.UNIT_TYPE_ID)
        self.assertEqual(unit['repo_id'], self.REPO_ID)
        self.assertEqual(unit['owner_id'], constants.HTTP_IMPORTER)
        file_path = '.'.join((unit_id, self.UNIT_TYPE_ID))
        self.assertEqual(storage_path,
                         os.path.join(self.childfs, 'content', file_path))
        self.assertTrue(os.path.exists(storage_path))
        # unit files are expected to contain exactly their unit id
        fp = open(storage_path)
        content = fp.read()
        fp.close()
        self.assertEqual(content, unit_id)
def POST(self, repo_id):
    """
    Add a distributor to the repository identified in the URL path.

    Body keys: distributor_type_id, distributor_config, distributor_id
    (optional), auto_publish (defaults to False). The distributor config is
    blacklisted from task kwargs so it is not archived/logged.
    """
    # Params (validation will occur in the manager)
    params = self.params()
    distributor_type = params.get('distributor_type_id', None)
    distributor_config = params.get('distributor_config', None)
    distributor_id = params.get('distributor_id', None)
    auto_publish = params.get('auto_publish', False)

    # Update the repo
    distributor_manager = manager_factory.repo_distributor_manager()

    weight = pulp_config.config.getint('tasks', 'create_weight')

    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            action_tag('add_distributor')]
    if distributor_id is not None:
        tags.append(resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE,
                                 distributor_id))
    call_request = CallRequest(distributor_manager.add_distributor,
                               [repo_id, distributor_type],
                               {'repo_plugin_config': distributor_config,
                                'auto_publish': auto_publish,
                                'distributor_id': distributor_id},
                               weight=weight,
                               tags=tags,
                               kwarg_blacklist=['repo_plugin_config'])
    call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    # only declare creation of the distributor resource when an id was given
    if distributor_id is not None:
        call_request.creates_resource(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE,
                                      distributor_id)
    return execution.execute_created(self, call_request, distributor_id)
def DELETE(self, repo_id, distributor_id):
    """
    Remove the distributor from the repository via the delete itinerary.

    get_distributor() raises MissingResource for unknown ids, so nothing is
    queued for absent resources.
    """
    # validate resources
    manager = manager_factory.repo_distributor_manager()
    manager.get_distributor(repo_id, distributor_id)

    # delete
    call_requests = distributor_delete_itinerary(repo_id, distributor_id)
    execution.execute_multiple(call_requests)
def populate(self, strategy=constants.DEFAULT_STRATEGY, ssl=False): PluginTestBase.populate(self) # register child manager = managers.consumer_manager() manager.register(self.PULP_ID, notes={constants.STRATEGY_NOTE_KEY: strategy}) manager = managers.repo_importer_manager() # add importer importer_conf = { constants.MANIFEST_URL_KEYWORD: 'http://redhat.com', constants.STRATEGY_KEYWORD: constants.DEFAULT_STRATEGY, constants.PROTOCOL_KEYWORD: 'file', } manager.set_importer(self.REPO_ID, constants.HTTP_IMPORTER, importer_conf) # add distributors if ssl: dist_conf = self.dist_conf_with_ssl() else: dist_conf = self.dist_conf() manager = managers.repo_distributor_manager() manager.add_distributor( self.REPO_ID, constants.HTTP_DISTRIBUTOR, dist_conf, False, constants.HTTP_DISTRIBUTOR) manager.add_distributor(self.REPO_ID, FAKE_DISTRIBUTOR, {}, False, FAKE_DISTRIBUTOR) # bind conf = {constants.STRATEGY_KEYWORD: strategy} manager = managers.consumer_bind_manager() manager.bind(self.PULP_ID, self.REPO_ID, constants.HTTP_DISTRIBUTOR, False, conf)
def delete(repo_id): """ Delete a repository and inform other affected collections. :param repo_id: id of the repository to delete. :type repo_id: str :raise pulp_exceptions.PulpExecutionException: if any part of the process fails; the exception will contain information on which sections failed :return: A TaskResult object with the details of any errors or spawned tasks :rtype: pulp.server.async.tasks.TaskResult """ # With so much going on during a delete, it's possible that a few things could go wrong while # others are successful. We track lesser errors that shouldn't abort the entire process until # the end and then raise an exception describing the incompleteness of the delete. The exception # arguments are captured as the second element in the tuple, but the user will have to look at # the server logs for more information. error_tuples = [] # tuple of failed step and exception arguments importer_manager = manager_factory.repo_importer_manager() distributor_manager = manager_factory.repo_distributor_manager() # Inform the importer importer_coll = RepoImporter.get_collection() repo_importer = importer_coll.find_one({'repo_id': repo_id}) if repo_importer is not None: try: importer_manager.remove_importer(repo_id) except Exception, e: _logger.exception( 'Error received removing importer [%s] from repo [%s]' % (repo_importer['importer_type_id'], repo_id)) error_tuples.append(e)
def GET(self, id):
    """
    Looks for query parameters 'importers' and 'distributors', and will add
    the corresponding fields to the repository returned. Query parameter
    'details' is equivalent to passing both 'importers' and 'distributors'.
    """
    query_params = web.input()
    repo = manager_factory.repo_query_manager().find_by_id(id)
    if repo is None:
        raise exceptions.MissingResource(repo=id)
    # augment the repo document with its canonical href and serializable dates
    repo.update(serialization.link.current_link_obj())
    _convert_repo_dates_to_strings(repo)
    # 'details' is shorthand for requesting both expansions
    if query_params.get('details', False):
        query_params['importers'] = True
        query_params['distributors'] = True
    if query_params.get('importers', False):
        repo = _merge_related_objects(
            'importers', manager_factory.repo_importer_manager(), (repo, ))[0]
    if query_params.get('distributors', False):
        repo = _merge_related_objects(
            'distributors', manager_factory.repo_distributor_manager(), (repo, ))[0]
    return self.ok(repo)
def PUT(self, repo_id, distributor_id):
    """
    Used to update a repo distributor instance. This requires update permissions.
    The expected parameters are 'distributor_config', which is a dictionary containing
    configuration values accepted by the distributor type, and 'delta', which is a dictionary
    containing other configuration values for the distributor (like the auto_publish flag,
    for example). Currently, the only supported key in the delta is 'auto_publish', which
    should have a boolean value.

    :param repo_id: The repository ID
    :type repo_id: str
    :param distributor_id: The unique distributor ID of the distributor instance to update.
    :type distributor_id: str
    """
    params = self.params()
    delta = params.get('delta', None)

    # validate: raises MissingResource if the distributor doesn't exist
    manager_factory.repo_distributor_manager().get_distributor(repo_id, distributor_id)

    config = params.get('distributor_config')
    if config is None:
        _LOG.error(
            'Missing configuration when updating distributor [%s] on repository [%s]',
            distributor_id, repo_id)
        raise exceptions.MissingValue(['distributor_config'])

    # update: build and execute the update + rebind itinerary
    call_requests = distributor_update_itinerary(repo_id, distributor_id, config, delta)
    execution.execute_multiple(call_requests)
def delete(repo_id): """ Delete a repository and inform other affected collections. :param repo_id: id of the repository to delete. :type repo_id: str :raise pulp_exceptions.PulpExecutionException: if any part of the process fails; the exception will contain information on which sections failed :return: A TaskResult object with the details of any errors or spawned tasks :rtype: pulp.server.async.tasks.TaskResult """ # With so much going on during a delete, it's possible that a few things could go wrong while # others are successful. We track lesser errors that shouldn't abort the entire process until # the end and then raise an exception describing the incompleteness of the delete. The exception # arguments are captured as the second element in the tuple, but the user will have to look at # the server logs for more information. error_tuples = [] # tuple of failed step and exception arguments importer_manager = manager_factory.repo_importer_manager() distributor_manager = manager_factory.repo_distributor_manager() # Inform the importer importer_coll = RepoImporter.get_collection() repo_importer = importer_coll.find_one({'repo_id': repo_id}) if repo_importer is not None: try: importer_manager.remove_importer(repo_id) except Exception, e: _logger.exception('Error received removing importer [%s] from repo [%s]' % ( repo_importer['importer_type_id'], repo_id)) error_tuples.append(e)
def GET(self, id):
    """
    Looks for query parameters 'importers' and 'distributors', and will add
    the corresponding fields to the repository returned. Query parameter
    'details' is equivalent to passing both 'importers' and 'distributors'.
    """
    query_params = web.input()
    repo = manager_factory.repo_query_manager().find_by_id(id)
    if repo is None:
        raise exceptions.MissingResource(id)
    # attach the canonical href for this repo
    repo.update(serialization.link.current_link_obj())
    # 'details' is shorthand for requesting both expansions
    if query_params.get('details', False):
        query_params['importers'] = True
        query_params['distributors'] = True
    if query_params.get('importers', False):
        repo = _merge_related_objects('importers',
                                      manager_factory.repo_importer_manager(),
                                      (repo,))[0]
    if query_params.get('distributors', False):
        repo = _merge_related_objects('distributors',
                                      manager_factory.repo_distributor_manager(),
                                      (repo,))[0]
    return self.ok(repo)
def _unbindings(bindings):
    """
    Build the (un)bindings needed by the agent.
    :param bindings: A list of binding IDs.
      Each binding is: {consumer_id:<str>, repo_id:<str>, distributor_id:<str>}
    :type bindings: list
    :return: A list of agent bindings.
      Each unbinding is: {type_id:<str>, repo_id:<str>}
    :rtype: list
    """
    agent_bindings = []
    for entry in bindings:
        manager = managers.repo_distributor_manager()
        try:
            distributor = manager.get_distributor(entry['repo_id'],
                                                  entry['distributor_id'])
        except MissingResource:
            # In case the distributor was already deleted from the server.
            type_id = None
        else:
            type_id = distributor['distributor_type_id']
        agent_bindings.append(dict(type_id=type_id, repo_id=entry['repo_id']))
    return agent_bindings
def _unbindings(bindings):
    """
    Build the (un)bindings needed by the agent.
    :param bindings: A list of binding IDs.
      Each binding is: {consumer_id:<str>, repo_id:<str>, distributor_id:<str>}
    :type bindings: list
    :return: A list of agent bindings.
      Each unbinding is: {type_id:<str>, repo_id:<str>}
    :rtype: list
    """
    def _lookup_type_id(repo_id, distributor_id):
        # None when the distributor was already deleted from the server.
        manager = managers.repo_distributor_manager()
        try:
            distributor = manager.get_distributor(repo_id, distributor_id)
            return distributor['distributor_type_id']
        except MissingResource:
            return None

    return [dict(type_id=_lookup_type_id(b['repo_id'], b['distributor_id']),
                 repo_id=b['repo_id'])
            for b in bindings]
def setUp(self):
    """Create the repo/distributor fixtures queried by the conduit tests."""
    super(RepoConfigConduitTests, self).setUp()
    mock_plugins.install()
    manager_factory.initialize()
    self.repo_manager = manager_factory.repo_manager()
    self.distributor_manager = manager_factory.repo_distributor_manager()

    # Populate the database with repos and distributors; each entry is
    # (repo_id, [(distributor_id, distributor config), ...]).
    fixtures = [
        ('repo-1', [('dist-1', {"relative_url": "/a/bc/d"}),
                    ('dist-2', {"relative_url": "/a/c"})]),
        ('repo-2', [('dist-3', {"relative_url": "/a/bc/e"})]),
        ('repo-3', [('dist-4', {})]),
        ('repo-4', [('dist-5', {"relative_url": "/repo-5"})]),
    ]
    for repo_id, distributors in fixtures:
        self.repo_manager.create_repo(repo_id)
        for dist_id, config in distributors:
            self.distributor_manager.add_distributor(
                repo_id, 'mock-distributor', config, True, distributor_id=dist_id)

    self.conduit = RepoConfigConduit('rpm')
def bind(self, consumer_id, repo_id, distributor_id):
    """
    Bind consumer to a specific distributor associated with a repository.
    This call is idempotent.

    @param consumer_id: uniquely identifies the consumer.
    @type consumer_id: str
    @param repo_id: uniquely identifies the repository.
    @type repo_id: str
    @param distributor_id: uniquely identifies a distributor.
    @type distributor_id: str
    @return: The Bind object
    @rtype: SON
    @raise MissingResource: when given consumer does not exist.
    """
    # Validate the consumer; raises MissingResource when unknown.
    manager = factory.consumer_manager()
    manager.get_consumer(consumer_id)
    # Validate the repo/distributor pair. Only the MissingResource side
    # effect of the lookup is needed, so the result is discarded (the
    # original bound it to an unused local).
    manager = factory.repo_distributor_manager()
    manager.get_distributor(repo_id, distributor_id)
    bind = Bind(consumer_id, repo_id, distributor_id)
    collection = Bind.get_collection()
    try:
        collection.save(bind, safe=True)
        bind = self.get_bind(consumer_id, repo_id, distributor_id)
    except DuplicateKeyError:
        # idempotent: the binding already exists; keep the constructed object
        pass
    # Notify the consumer's agent of the (possibly pre-existing) binding.
    manager = factory.consumer_agent_manager()
    manager.bind(consumer_id, repo_id)
    # Record the event in the consumer's history.
    consumer_event_details = {"repo_id": repo_id, "distributor_id": distributor_id}
    factory.consumer_history_manager().record_event(consumer_id, "repo_bound",
                                                    consumer_event_details)
    return bind
def _bindings(bindings):
    """
    Build the bindings needed by the agent. The returned bindings will be the
    payload created by the appropriate distributor.

    :param bindings: a list of binding object retrieved from the database
    :type bindings: list
    :return: list of binding objects to send to the agent
    :rtype: list
    """
    agent_bindings = []
    for entry in bindings:
        manager = managers.repo_distributor_manager()
        distributor = manager.get_distributor(entry['repo_id'],
                                              entry['distributor_id'])
        # the distributor plugin builds the consumer-facing payload
        details = manager.create_bind_payload(entry['repo_id'],
                                              entry['distributor_id'],
                                              entry['binding_config'])
        agent_bindings.append(dict(type_id=distributor['distributor_type_id'],
                                   repo_id=entry['repo_id'],
                                   details=details))
    return agent_bindings
def setUp(self):
    """Create the repo/distributor fixtures queried by the conduit tests."""
    super(RepoConfigConduitTests, self).setUp()
    mock_plugins.install()
    manager_factory.initialize()
    self.repo_manager = manager_factory.repo_manager()
    self.distributor_manager = manager_factory.repo_distributor_manager()

    # Populate the database: (repo_id, distributor_id, distributor config)
    # triples, applied in this exact order; repos are created on first use.
    fixture = [
        ('repo-1', 'dist-1', {"relative_url": "/a/bc/d"}),
        ('repo-1', 'dist-2', {"relative_url": "/a/c"}),
        ('repo-2', 'dist-3', {"relative_url": "/a/bc/e"}),
        ('repo-3', 'dist-4', {}),
        ('repo-4', 'dist-5', {"relative_url": "repo-5"}),
        ('repo-5', 'dist-1', {"relative_url": "a/bcd/e"}),
        ('repo-6', 'dist-1', {"relative_url": "a/bcde/f/"}),
    ]
    created = set()
    for repo_id, dist_id, config in fixture:
        if repo_id not in created:
            self.repo_manager.create_repo(repo_id)
            created.add(repo_id)
        self.distributor_manager.add_distributor(
            repo_id, 'mock-distributor', config, True, distributor_id=dist_id)

    self.conduit = RepoConfigConduit('rpm')
def create_publish_schedule(self, repo_id, distributor_id, publish_options, schedule_data): """ Create a new scheduled publish for the given repository and distributor. @param repo_id: @param distributor_id: @param publish_options: @param schedule_data: @return: """ # validate the input self._validate_distributor(repo_id, distributor_id) self._validate_keys(publish_options, _PUBLISH_OPTION_KEYS) if 'schedule' not in schedule_data: raise pulp_exceptions.MissingValue(['schedule']) # build the publish call publish_manager = managers_factory.repo_publish_manager() args = [repo_id, distributor_id] kwargs = {'publish_config_override': publish_options['override_config']} weight = pulp_config.config.getint('tasks', 'publish_weight') tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id), resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id)] call_request = CallRequest(publish_manager.publish, args, kwargs, weight=weight, tags=tags, archive=True) call_request.reads_resource(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id) call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id) call_request.add_life_cycle_callback(dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK, publish_manager.prep_publish) # schedule the publish scheduler = dispatch_factory.scheduler() schedule_id = scheduler.add(call_request, **schedule_data) distributor_manager = managers_factory.repo_distributor_manager() distributor_manager.add_publish_schedule(repo_id, distributor_id, schedule_id) return schedule_id
def __unbindings(self, bindings):
    """
    Build the (un)bindings needed by the agent.
    @param bindings: A list of binding IDs.
      Each binding is: {consumer_id:<str>, repo_id:<str>, distributor_id:<str>}
    @type bindings: list
    @return A list of agent bindings.
      Each unbinding is: {type_id:<str>, repo_id:<str>}
    @rtype: list
    """
    agent_bindings = []
    for entry in bindings:
        manager = managers.repo_distributor_manager()
        try:
            distributor = manager.get_distributor(entry['repo_id'],
                                                  entry['distributor_id'])
            type_id = distributor['distributor_type_id']
        except MissingResource:
            # may have been deleted
            type_id = None
        agent_bindings.append(dict(type_id=type_id, repo_id=entry['repo_id']))
    return agent_bindings
def __bindings(self, bindings):
    """
    Build the bindings needed by the agent.
    @param bindings: A list of binding IDs.
      Each binding is: {consumer_id:<str>, repo_id:<str>, distributor_id:<str>}
    @type bindings: list
    @return A list of agent bindings.
      Each binding is: {type_id:<str>, repo_id:<str>, details:<dict>}
    @rtype: list
    """
    agent_bindings = []
    for entry in bindings:
        manager = managers.repo_distributor_manager()
        distributor = manager.get_distributor(entry['repo_id'],
                                              entry['distributor_id'])
        # the distributor plugin builds the consumer-facing payload
        payload = manager.create_bind_payload(entry['repo_id'],
                                              entry['distributor_id'])
        agent_bindings.append(dict(type_id=distributor['distributor_type_id'],
                                   repo_id=entry['repo_id'],
                                   details=payload))
    return agent_bindings
def bind(self, consumer_id, repo_id, distributor_id): """ Bind consumer to a specific distributor associated with a repository. This call is idempotent. @param consumer_id: uniquely identifies the consumer. @type consumer_id: str @param repo_id: uniquely identifies the repository. @type repo_id: str @param distributor_id: uniquely identifies a distributor. @type distributor_id: str @return: The Bind object @rtype: SON @raise MissingResource: when given consumer does not exist. """ # ensure the consumer is valid manager = factory.consumer_manager() manager.get_consumer(consumer_id) # ensure the repository & distributor are valid manager = factory.repo_distributor_manager() manager.get_distributor(repo_id, distributor_id) # perform the bind collection = Bind.get_collection() try: bind = Bind(consumer_id, repo_id, distributor_id) collection.save(bind, safe=True) except DuplicateKeyError: self.__reset_bind(consumer_id, repo_id, distributor_id) # fetch the inserted/updated bind bind = self.get_bind(consumer_id, repo_id, distributor_id) # update history details = {'repo_id':repo_id, 'distributor_id':distributor_id} manager = factory.consumer_history_manager() manager.record_event(consumer_id, 'repo_bound', details) return bind
def test_delete_with_plugin_error(self):
    """
    Tests deleting a repo where one (or more) of the plugins raises an error.

    The mock plugins are shared module-level objects, so their side_effect
    attributes MUST be cleared even when the assertion fails; otherwise later
    tests see the poisoned mocks. The original skipped cleanup on failure.
    """
    # Setup
    self.manager.create_repo('doomed')

    importer_manager = manager_factory.repo_importer_manager()
    distributor_manager = manager_factory.repo_distributor_manager()

    importer_manager.set_importer('doomed', 'mock-importer', {})
    distributor_manager.add_distributor('doomed', 'mock-distributor', {}, True,
                                        distributor_id='dist-1')

    # Setup both mocks to raise errors on removal
    mock_plugins.MOCK_IMPORTER.importer_removed.side_effect = Exception('Splat')
    mock_plugins.MOCK_DISTRIBUTOR.distributor_removed.side_effect = Exception('Pow')

    try:
        # Test - idiomatic assertRaises instead of try/fail/except
        self.assertRaises(exceptions.PulpExecutionException,
                          self.manager.delete_repo, 'doomed')
    finally:
        # Cleanup - always clear the side effects, even on test failure
        mock_plugins.MOCK_IMPORTER.importer_removed.side_effect = None
        mock_plugins.MOCK_DISTRIBUTOR.distributor_removed.side_effect = None
def verify_group_resources(group_id, repo_id, distributor_id):
    """
    Confirm the group, repository, and distributor exist.

    :param group_id: The consumer group id to verify the existence of
    :type group_id: str
    :param repo_id: The repository id to confirm the existence of
    :type repo_id: str
    :param distributor_id: The distributor id to confirm the existence of on the repository
    :type distributor_id: str
    :return: A dictionary of the missing resources
    :rtype: dict
    """
    missing_resources = {}
    group_manager = factory.consumer_group_query_manager()
    repo_manager = factory.repo_query_manager()
    distributor_manager = factory.repo_distributor_manager()
    # the group lookup raises on a miss; the repo lookup returns None instead
    try:
        group_manager.get_group(group_id)
    except pulp_exceptions.MissingResource:
        missing_resources['group_id'] = group_id
    if repo_manager.find_by_id(repo_id) is None:
        missing_resources['repo_id'] = repo_id
    try:
        distributor_manager.get_distributor(repo_id, distributor_id)
    except pulp_exceptions.MissingResource:
        missing_resources['distributor_id'] = distributor_id
    return missing_resources
def update_repo_and_plugins(self, repo_id, repo_delta, importer_config, distributor_configs):
    """
    Aggregate method that will update one or more of the following:
    * Repository metadata
    * Importer config
    * Zero or more distributors on the repository

    All of the above pieces do not need to be specified. If a piece is omitted
    it's configuration is not touched, nor is it removed from the repository.
    The same holds true for the distributor_configs dict, not every distributor
    must be represented.

    This call will attempt the updates in the order listed above. If an
    exception occurs during any of these steps, the updates stop and the
    exception is immediately raised. Any updates that have already taken place
    are not rolled back.

    This call will call out to RepoImporterManager.update_importer_config and
    RepoDistributorManager.update_distributor_config. Documentation for those
    methods, especially possible exceptions, should be consulted for more
    information.

    :param repo_id: unique identifier for the repo
    :type repo_id: str
    :param repo_delta: list of attributes and their new values to change;
           if None, no attempt to update the repo's metadata will be made
    :type repo_delta: dict, None
    :param importer_config: new configuration to use for the repo's importer;
           if None, no attempt will be made to update the importer
    :type importer_config: dict, None
    :param distributor_configs: mapping of distributor ID to the new
           configuration to set for it
    :type distributor_configs: dict, None
    :return: updated repository object, same as returned from update_repo
    """
    # Repo Update (a None delta still refreshes the repo via an empty delta)
    if repo_delta is None:
        repo_delta = {}
    repo = self.update_repo(repo_id, repo_delta)

    # Importer Update
    if importer_config is not None:
        importer_manager = manager_factory.repo_importer_manager()
        importer_manager.update_importer_config(repo_id, importer_config)

    # Distributor Update - each listed distributor is updated in turn
    if distributor_configs is not None:
        distributor_manager = manager_factory.repo_distributor_manager()
        for dist_id, dist_config in distributor_configs.items():
            distributor_manager.update_distributor_config(repo_id, dist_id, dist_config)

    return repo
def setUp(self):
    """Install the mock plugins and cache the managers used by these tests."""
    super(RepoQueryManagerTests, self).setUp()
    mock_plugins.install()
    self.repo_manager = manager_factory.repo_manager()
    self.importer_manager = manager_factory.repo_importer_manager()
    self.distributor_manager = manager_factory.repo_distributor_manager()
    self.query_manager = manager_factory.repo_query_manager()
def populate_repository(self):
    """Attach a mock distributor with a two-key config to the test repo."""
    distributor_manager = factory.repo_distributor_manager()
    distributor_manager.add_distributor(self.REPO_ID,
                                        'mock-distributor',
                                        {'key1': 'value1', 'key2': None},
                                        True,
                                        distributor_id=self.DISTRIBUTOR_ID)
def distributor_update_itinerary(repo_id, distributor_id, config):
    """
    Get the itinerary for updating a repository distributor.
      1. Update the distributor on the sever.
      2. (re)bind any bound consumers.
    @param repo_id: A repository ID.
    @type repo_id: str
    @param distributor_id: A distributor ID.
    @type distributor_id: str
    @param config: The new distributor configuration.
    @type config: dict
    @return: A list of call_requests known as an itinerary.
    @rtype list
    """
    call_requests = []

    # update the distributor
    manager = managers.repo_distributor_manager()

    # the update touches both the repo and its distributor resource
    resources = {
        dispatch_constants.RESOURCE_REPOSITORY_TYPE: {
            repo_id: dispatch_constants.RESOURCE_UPDATE_OPERATION
        },
        dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE: {
            distributor_id: dispatch_constants.RESOURCE_UPDATE_OPERATION
        }
    }
    tags = [
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id),
        action_tag('update_distributor')
    ]
    # NOTE(review): distributor_config is kwarg-blacklisted — presumably to
    # keep it out of the archived call record; confirm against CallRequest docs.
    update_request = CallRequest(manager.update_distributor_config,
                                 [repo_id, distributor_id],
                                 {'distributor_config': config},
                                 resources=resources,
                                 tags=tags,
                                 archive=True,
                                 kwarg_blacklist=['distributor_config'])
    call_requests.append(update_request)

    # append unbind itineraries foreach bound consumer
    options = {}
    manager = managers.consumer_bind_manager()
    for bind in manager.find_by_distributor(repo_id, distributor_id):
        bind_requests = bind_itinerary(bind['consumer_id'],
                                       bind['repo_id'],
                                       bind['distributor_id'],
                                       bind['notify_agent'],
                                       bind['binding_config'],
                                       options)
        if bind_requests:
            # rebinding must wait until the distributor update completes
            bind_requests[0].depends_on(update_request.id)
            call_requests.extend(bind_requests)

    return call_requests
def populate(self):
    """Create the repo + distributor fixture and register the test consumer."""
    factory.repo_manager().create_repo(self.REPO_ID)
    factory.repo_distributor_manager().add_distributor(
        self.REPO_ID, self.DISTRIBUTOR_TYPE_ID, {}, True,
        distributor_id=self.DISTRIBUTOR_ID)
    # have the mock distributor hand back the canned consumer payload
    mock_plugins.MOCK_DISTRIBUTOR.create_consumer_payload.return_value = self.PAYLOAD
    factory.consumer_manager().register(self.CONSUMER_ID)
def populate(self):
    """Add a mock distributor to the repo and register every test consumer."""
    distributor_manager = factory.repo_distributor_manager()
    distributor_manager.add_distributor(self.REPO_ID,
                                        'mock-distributor',
                                        {'key1': 'value1', 'key2': None},
                                        True,
                                        distributor_id=self.DISTRIBUTOR_ID)
    consumer_manager = factory.consumer_manager()
    for consumer_id in self.ALL_CONSUMERS:
        consumer_manager.register(consumer_id)
def _get_distributor_instance_and_config(self, repo_id, distributor_id):
    """
    Look up the distributor plugin instance and its plugin config for the
    given repo/distributor pair; returns (None, None) when either the
    distributor record or the plugin cannot be found.
    """
    try:
        record = manager_factory.repo_distributor_manager().get_distributor(
            repo_id, distributor_id)
        distributor, config = plugin_api.get_distributor_by_id(
            record['distributor_type_id'])
    except (MissingResource, plugin_exceptions.PluginNotFound):
        distributor = None
        config = None
    return distributor, config
def distributor_delete_itinerary(repo_id, distributor_id):
    """
    Get the itinerary for deleting a repository distributor.
      1. Delete the distributor on the sever.
      2. Unbind any bound consumers.
    @param repo_id: A repository ID.
    @type repo_id: str
    @param distributor_id: A distributor ID.
    @type distributor_id: str
    @return: A list of call_requests known as an itinerary.
    @rtype list
    """
    call_requests = []

    # delete distributor
    manager = managers.repo_distributor_manager()

    # deleting the distributor updates the repo and deletes the distributor
    resources = {
        dispatch_constants.RESOURCE_REPOSITORY_TYPE: {
            repo_id: dispatch_constants.RESOURCE_UPDATE_OPERATION
        },
        dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE: {
            distributor_id: dispatch_constants.RESOURCE_DELETE_OPERATION
        }
    }
    tags = [
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id),
        action_tag('remove_distributor')
    ]
    delete_request = CallRequest(manager.remove_distributor,
                                 [repo_id, distributor_id],
                                 resources=resources,
                                 tags=tags,
                                 archive=True)
    call_requests.append(delete_request)

    # append unbind itineraries foreach bound consumer
    options = {}
    manager = managers.consumer_bind_manager()
    for bind in manager.find_by_distributor(repo_id, distributor_id):
        unbind_requests = unbind_itinerary(bind['consumer_id'],
                                           bind['repo_id'],
                                           bind['distributor_id'],
                                           options)
        if unbind_requests:
            # unbinding must wait until the distributor delete completes
            unbind_requests[0].depends_on(delete_request.id)
            call_requests.extend(unbind_requests)

    return call_requests
def delete_repo(self, repo_id): """ Deletes the given repository, optionally requesting the associated importer clean up any content in the repository. :param repo_id: identifies the repo being deleted :type repo_id: str :raise MissingResource: if the given repo does not exist :raise OperationFailed: if any part of the delete process fails; the exception will contain information on which sections failed """ # Validation found = Repo.get_collection().find_one({'id': repo_id}) if found is None: raise MissingResource(repo_id) # With so much going on during a delete, it's possible that a few things # could go wrong while others are successful. We track lesser errors # that shouldn't abort the entire process until the end and then raise # an exception describing the incompleteness of the delete. The exception # arguments are captured as the second element in the tuple, but the user # will have to look at the server logs for more information. error_tuples = [] # tuple of failed step and exception arguments # Remove and scheduled activities scheduler = dispatch_factory.scheduler() importer_manager = manager_factory.repo_importer_manager() importers = importer_manager.get_importers(repo_id) if importers: for schedule_id in importer_manager.list_sync_schedules(repo_id): scheduler.remove(schedule_id) distributor_manager = manager_factory.repo_distributor_manager() for distributor in distributor_manager.get_distributors(repo_id): for schedule_id in distributor_manager.list_publish_schedules( repo_id, distributor['id']): scheduler.remove(schedule_id) # Inform the importer importer_coll = RepoImporter.get_collection() repo_importer = importer_coll.find_one({'repo_id': repo_id}) if repo_importer is not None: try: importer_manager.remove_importer(repo_id) except Exception, e: _LOG.exception( 'Error received removing importer [%s] from repo [%s]' % (repo_importer['importer_type_id'], repo_id)) error_tuples.append((_('Importer Delete Error'), e.args))
def serialize(bind, include_details=True): """ Construct a REST object to be returned. Add _href and augments information used by the caller to consume published content. @param bind: A bind model/SON object. @type bind: dict/SON @return: A bind REST object. {consumer_id:<str>, repo_id:<str>, distributor_id:<str>, href:<str>, type_id:<str>, details:<dict>} @rtype: dict """ # bind serialized = dict(bind) consumer_id = bind['consumer_id'] repo_id = bind['repo_id'] distributor_id = bind['distributor_id'] # href # 1019155 - Make sure the binding URL points to: # /pulp/api/v2/consumers/<consumer_id>/bindings/<repo_id>/<distributor_id/ href_url = '%s/consumers/%s/bindings/%s/%s/' % ( http.API_V2_HREF, consumer_id, repo_id, distributor_id) href = link.link_obj(href_url) serialized.update(href) repo_distributor_manager = manager_factory.repo_distributor_manager() # type_id try: distributor = repo_distributor_manager.get_distributor( repo_id, distributor_id) except MissingResource: if include_details: raise else: serialized['type_id'] = distributor['distributor_type_id'] # details if include_details: details = repo_distributor_manager.create_bind_payload( repo_id, distributor_id, bind['binding_config']) serialized['details'] = details return serialized
def setUp(self):
    """Build one repo with a single mock distributor for the conduit tests."""
    super(RepoPublishConduitTests, self).setUp()
    mock_plugins.install()
    manager_factory.initialize()

    self.repo_manager = manager_factory.repo_manager()
    self.distributor_manager = manager_factory.repo_distributor_manager()

    # Populate the database with a repo with units
    self.repo_manager.create_repo('repo-1')
    self.distributor_manager.add_distributor('repo-1', 'mock-distributor', {},
                                             True, distributor_id='dist-1')

    self.conduit = RepoPublishConduit('repo-1', 'dist-1')
def populate(self):
    """Create repo, distributor, and consumer, then bind them together."""
    factory.repo_manager().create_repo(self.REPO_ID)
    factory.repo_distributor_manager().add_distributor(
        self.REPO_ID, self.DISTRIBUTOR_TYPE_ID, {}, True,
        distributor_id=self.DISTRIBUTOR_ID)
    factory.consumer_manager().register(self.CONSUMER_ID)
    factory.consumer_bind_manager().bind(
        self.CONSUMER_ID, self.REPO_ID, self.DISTRIBUTOR_ID,
        self.NOTIFY_AGENT, self.BINDING_CONFIG)
def validate_distributor(repo_id, distributor_id):
    """
    Validate that the distributor exists for the specified repo.

    :param repo_id: unique ID for a repository
    :type repo_id: basestring
    :param distributor_id: unique ID for a distributor
    :type distributor_id: basestring
    :raise: pulp.server.exceptions.MissingResource
    """
    # get_distributor raises MissingResource itself, so just delegate
    manager = managers_factory.repo_distributor_manager()
    manager.get_distributor(repo_id, distributor_id)