def setUp(self):
    super(DependencyManagerTests, self).setUp()
    mock_plugins.install()
    database.update_database([TYPE_1_DEF])
    self.repo_id = 'dep-repo'
    self.manager = manager_factory.dependency_manager()
    manager_factory.repo_manager().create_repo(self.repo_id)
    manager_factory.repo_importer_manager().set_importer(self.repo_id, 'mock-importer', {})
def put(self, request, repo_id, importer_id):
    """
    Associate an importer to a repository.

    :param request: WSGI request object
    :type request: django.core.handlers.wsgi.WSGIRequest
    :param repo_id: The id of the repository
    :type repo_id: str
    :param importer_id: The id of the importer to associate
    :type importer_id: str
    :raises pulp_exceptions.MissingValue: if required param importer_config is not in the body
    :raises pulp_exceptions.MissingResource: if importer does not match the repo's importer
    :raises pulp_exceptions.OperationPostponed: dispatch a task
    """
    importer_manager = manager_factory.repo_importer_manager()
    importer = importer_manager.get_importer(repo_id)
    if importer['id'] != importer_id:
        raise pulp_exceptions.MissingResource(importer_id=importer_id)
    importer_config = request.body_as_json.get('importer_config', None)
    if importer_config is None:
        raise pulp_exceptions.MissingValue(['importer_config'])

    task_tags = [tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
                 tags.resource_tag(tags.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id),
                 tags.action_tag('update_importer')]
    async_result = repo_importer_manager.update_importer_config.apply_async_with_reservation(
        tags.RESOURCE_REPOSITORY_TYPE, repo_id, [repo_id],
        {'importer_config': importer_config}, tags=task_tags)
    raise pulp_exceptions.OperationPostponed(async_result)
def delete(self, request, repo_id, importer_id):
    """
    Remove an importer from a repository.

    :param request: WSGI request object
    :type request: django.core.handlers.wsgi.WSGIRequest
    :param repo_id: The id of the repository to remove the importer from
    :type repo_id: str
    :param importer_id: The id of the importer to remove from the given repository
    :type importer_id: str
    :raises pulp_exceptions.MissingResource: if importer cannot be found for this repo
    :raises pulp_exceptions.OperationPostponed: to dispatch a task to delete the importer
    """
    importer_manager = manager_factory.repo_importer_manager()
    importer = importer_manager.get_importer(repo_id)
    if importer['id'] != importer_id:
        raise pulp_exceptions.MissingResource(importer_id=importer_id)

    task_tags = [tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
                 tags.resource_tag(tags.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id),
                 tags.action_tag('delete_importer')]
    async_result = repo_importer_manager.remove_importer.apply_async_with_reservation(
        tags.RESOURCE_REPOSITORY_TYPE, repo_id, [repo_id], tags=task_tags)
    raise pulp_exceptions.OperationPostponed(async_result)
def post(self, request, repo_id):
    """
    Associate an importer with a repository.

    This will validate that the repository exists and that there is an importer with the
    importer_type_id given. However, the importer configuration validation only checks the
    provided values against a standard set of importer configuration keys. The importer
    specific validation is called on association, so any type specific configuration will
    be validated later. This means the spawned task could fail with a validation error.

    :param request: WSGI request object
    :type request: django.core.handlers.wsgi.WSGIRequest
    :param repo_id: the repository to associate the importer with
    :type repo_id: str
    :raises pulp_exceptions.OperationPostponed: dispatch a task
    """
    importer_type = request.body_as_json.get('importer_type_id', None)
    config = request.body_as_json.get('importer_config', None)

    # Validation occurs within the manager
    importer_manager = manager_factory.repo_importer_manager()
    importer_manager.validate_importer_config(repo_id, importer_type, config)

    task_tags = [tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
                 tags.action_tag('add_importer')]
    async_result = repo_importer_manager.set_importer.apply_async_with_reservation(
        tags.RESOURCE_REPOSITORY_TYPE, repo_id, [repo_id, importer_type],
        {'repo_plugin_config': config}, tags=task_tags)
    raise pulp_exceptions.OperationPostponed(async_result)
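# Illustrative only: a minimal client-side sketch of the request the view above handles. The
# host, endpoint path, credentials, and importer type are assumptions, not taken from the code;
# only the 'importer_type_id' and 'importer_config' body keys come from the view itself.
import requests

body = {
    'importer_type_id': 'yum_importer',                          # hypothetical importer type
    'importer_config': {'feed': 'https://example.com/repo/'},    # hypothetical config
}
# Assumed Pulp-2-style URL layout; adjust host, repo id, and auth for a real deployment.
response = requests.post(
    'https://pulp.example.com/pulp/api/v2/repositories/my-repo/importers/',
    json=body, auth=('admin', 'admin'), verify=False)
print(response.status_code)  # the view dispatches a task, so a 202 Accepted is expected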
def get(self, request, repo_id):
    """
    Looks for query parameters 'importers' and 'distributors', and will add the corresponding
    fields to the repository returned. Query parameter 'details' is equivalent to passing
    both 'importers' and 'distributors'.

    :param request: WSGI request object
    :type request: django.core.handlers.wsgi.WSGIRequest
    :param repo_id: id of requested repository
    :type repo_id: str
    :return: Response containing a serialized dict for the requested repo.
    :rtype: django.http.HttpResponse
    :raises pulp_exceptions.MissingResource: if repo cannot be found
    """
    query_manager = manager_factory.repo_query_manager()
    repo = query_manager.find_by_id(repo_id)
    if repo is None:
        raise pulp_exceptions.MissingResource(repo=repo_id)

    repo['_href'] = reverse('repo_resource', kwargs={'repo_id': repo_id})
    _convert_repo_dates_to_strings(repo)

    details = request.GET.get('details', 'false').lower() == 'true'
    if request.GET.get('importers', 'false').lower() == 'true' or details:
        repo = _merge_related_objects(
            'importers', manager_factory.repo_importer_manager(), (repo,))[0]
    if request.GET.get('distributors', 'false').lower() == 'true' or details:
        repo = _merge_related_objects(
            'distributors', manager_factory.repo_distributor_manager(), (repo,))[0]

    return generate_json_response_with_pulp_encoder(repo)
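# Illustrative only: fetching a repository with its importers and distributors attached via the
# 'details' query parameter described above. Host, repo id, and credentials are assumptions.
import requests

resp = requests.get(
    'https://pulp.example.com/pulp/api/v2/repositories/my-repo/',
    params={'details': 'true'}, auth=('admin', 'admin'), verify=False)
repo = resp.json()
# With details=true the serialized repo is expected to carry both related lists.
print(repo.get('importers'), repo.get('distributors'))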
def POST(self, repo_id):
    """
    Associate an importer with a repository.

    This will validate that the repository exists and that there is an importer with the
    importer_type_id given. However, the importer configuration validation only checks the
    provided values against a standard set of importer configuration keys. The importer
    specific validation is called on association, so any type specific configuration will
    be validated later. This means the spawned task could fail with a validation error.

    :param repo_id: the repository to associate the importer with
    :type repo_id: str
    """
    params = self.params()
    importer_type = params.get('importer_type_id', None)
    config = params.get('importer_config', None)

    # This call will raise the appropriate exception
    importer_manager = manager_factory.repo_importer_manager()
    importer_manager.validate_importer_config(repo_id, importer_type, config)

    # Note: If an importer exists, it's removed, so no need to handle 409s.
    # Note: If the plugin raises an exception during initialization, let it
    # bubble up and be handled like any other 500.

    task_tags = [tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
                 tags.action_tag('add_importer')]
    async_result = set_importer.apply_async_with_reservation(
        tags.RESOURCE_REPOSITORY_TYPE, repo_id, [repo_id, importer_type],
        {'repo_plugin_config': config}, tags=task_tags)
    raise exceptions.OperationPostponed(async_result)
def create_sync_schedule(self, repo_id, importer_id, sync_options, schedule_data):
    """
    Create a new sync schedule for a given repository using the given importer.

    @param repo_id: identifies the repository the sync schedule is for
    @param importer_id: identifies the importer that will perform the scheduled sync
    @param sync_options: sync options, including the override_config passed to the importer
    @param schedule_data: scheduling details; must contain a 'schedule' entry
    @return: ID of the newly created schedule
    """
    # validate the input
    self._validate_importer(repo_id, importer_id)
    schedule_utils.validate_keys(sync_options, _SYNC_OPTION_KEYS)
    if 'schedule' not in schedule_data:
        raise pulp_exceptions.MissingValue(['schedule'])

    # build the sync call request
    args = [repo_id]
    kwargs = {'overrides': sync_options['override_config']}
    call_request = CallRequest(sync_with_auto_publish_itinerary, args, kwargs, weight=0)

    # schedule the sync
    scheduler = dispatch_factory.scheduler()
    schedule_id = scheduler.add(call_request, **schedule_data)
    importer_manager = managers_factory.repo_importer_manager()
    importer_manager.add_sync_schedule(repo_id, schedule_id)
    return schedule_id
def __init__(self, source_repo_id, dest_repo_id, source_importer_id, dest_importer_id):
    """
    :param source_repo_id: ID of the repository from which units are being copied
    :type source_repo_id: str
    :param dest_repo_id: ID of the repository into which units are being copied
    :type dest_repo_id: str
    :param source_importer_id: ID of the importer on the source repository
    :type source_importer_id: str
    :param dest_importer_id: ID of the importer on the destination repository
    :type dest_importer_id: str
    """
    ImporterScratchPadMixin.__init__(self, dest_repo_id, dest_importer_id)
    RepoScratchPadMixin.__init__(self, dest_repo_id, ImporterConduitException)
    SearchUnitsMixin.__init__(self, ImporterConduitException)
    AddUnitMixin.__init__(self, dest_repo_id, dest_importer_id)

    self.source_repo_id = source_repo_id
    self.dest_repo_id = dest_repo_id
    self.source_importer_id = source_importer_id
    self.dest_importer_id = dest_importer_id

    self.__association_manager = manager_factory.repo_unit_association_manager()
    self.__association_query_manager = manager_factory.repo_unit_association_query_manager()
    self.__importer_manager = manager_factory.repo_importer_manager()
def _process_repos(repos, importers=False, distributors=False):
    """
    Apply standard processing to a collection of repositories being returned to a client.
    Adds the object link and optionally adds related importers and distributors.

    :param repos: collection of repositories
    :type repos: list, tuple
    :param importers: if True, adds related importers under the attribute "importers".
    :type importers: bool
    :param distributors: if True, adds related distributors under the attribute "distributors"
    :type distributors: bool
    :return: the same list that was passed in, just for convenience. The list itself is not
             modified; only its members are modified in-place.
    :rtype: list of Repo instances
    """
    if importers:
        _merge_related_objects(
            'importers', manager_factory.repo_importer_manager(), repos)
    if distributors:
        _merge_related_objects(
            'distributors', manager_factory.repo_distributor_manager(), repos)

    for repo in repos:
        repo['_href'] = reverse('repo_resource', kwargs={'repo_id': repo['id']})
        _convert_repo_dates_to_strings(repo)

        # Remove internally used scratchpad from repo details
        if 'scratchpad' in repo:
            del repo['scratchpad']

    return repos
def PUT(self, repo_id, importer_id):
    # Raise a MissingResource exception if the repo or the importer doesn't exist
    importer_manager = manager_factory.repo_importer_manager()
    importer = importer_manager.get_importer(repo_id)
    if importer['id'] != importer_id:
        raise exceptions.MissingResource(importer_id=importer_id)

    if not plugin_api.is_valid_importer(importer_id):
        raise exceptions.PulpCodedValidationException(error_code=error_codes.PLP1008)

    params = self.params()
    importer_config = params.get('importer_config', None)
    if importer_config is None:
        _logger.error('Missing configuration updating importer for repository [%s]' % repo_id)
        raise exceptions.MissingValue(['importer_config'])

    task_tags = [tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
                 tags.resource_tag(tags.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id),
                 tags.action_tag('update_importer')]
    async_result = update_importer_config.apply_async_with_reservation(
        tags.RESOURCE_REPOSITORY_TYPE, repo_id, [repo_id],
        {'importer_config': importer_config}, tags=task_tags)
    raise exceptions.OperationPostponed(async_result)
def test_post(self):
    # Setup
    upload_id = self.upload_manager.initialize_upload()
    self.upload_manager.save_data(upload_id, 0, 'string data')

    repo_manager = manager_factory.repo_manager()
    repo_manager.create_repo('repo-upload')
    importer_manager = manager_factory.repo_importer_manager()
    importer_manager.set_importer('repo-upload', 'dummy-importer', {})

    # Test
    body = {
        'upload_id': upload_id,
        'unit_type_id': 'dummy-type',
        'unit_key': {'name': 'foo'},
        'unit_metadata': {'stuff': 'bar'},
    }
    status, body = self.post('/v2/repositories/repo-upload/actions/import_upload/', body)

    # Verify
    self.assertEqual(200, status)
def POST(self, repo_id):
    # Params (validation will occur in the manager)
    params = self.params()
    importer_type = params.get('importer_type_id', None)
    importer_config = params.get('importer_config', None)

    if importer_type is None:
        _LOG.error('Missing importer type adding importer to repository [%s]' % repo_id)
        raise exceptions.MissingValue(['importer_type'])

    # Note: If an importer exists, it's removed, so no need to handle 409s.
    # Note: If the plugin raises an exception during initialization, let it
    # bubble up and be handled like any other 500.

    importer_manager = manager_factory.repo_importer_manager()
    weight = pulp_config.config.getint('tasks', 'create_weight')
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            action_tag('add_importer')]
    call_request = CallRequest(importer_manager.set_importer,
                               [repo_id, importer_type],
                               {'repo_plugin_config': importer_config},
                               weight=weight,
                               tags=tags,
                               kwarg_blacklist=['repo_plugin_config'])
    call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    return execution.execute_sync_created(self, call_request, 'importer')
def test_update_repo_and_plugins(self, distributor_update, mock_get_worker_for_reservation):
    """
    Tests the aggregate call to update a repo and its plugins.
    """
    mock_get_worker_for_reservation.return_value = Worker('some_queue', datetime.datetime.now())
    self.manager.create_repo('repo-1', 'Original', 'Original Description')

    importer_manager = manager_factory.repo_importer_manager()
    distributor_manager = manager_factory.repo_distributor_manager()

    importer_manager.set_importer('repo-1', 'mock-importer', {'key-i1': 'orig-1'})
    distributor_manager.add_distributor('repo-1', 'mock-distributor', {'key-d1': 'orig-1'},
                                        True, distributor_id='dist-1')
    distributor_manager.add_distributor('repo-1', 'mock-distributor', {'key-d2': 'orig-2'},
                                        True, distributor_id='dist-2')

    # Test
    repo_delta = {'display_name': 'Updated'}
    new_importer_config = {'key-i1': 'updated-1', 'key-i2': 'new-1'}
    # only update one of the two distributors
    new_distributor_configs = {'dist-1': {'key-d1': 'updated-1'}}

    result = self.manager.update_repo_and_plugins('repo-1', repo_delta, new_importer_config,
                                                  new_distributor_configs)

    self.assertTrue(isinstance(result, TaskResult))
    self.assertEquals(None, result.error)
    repo = result.return_value

    # Verify
    self.assertEqual(repo['id'], 'repo-1')
    self.assertEqual(repo['display_name'], 'Updated')
    self.assertEqual(repo['description'], 'Original Description')

    importer = importer_manager.get_importer('repo-1')
    self.assertEqual(importer['config'], new_importer_config)

    dist_1 = distributor_manager.get_distributor('repo-1', 'dist-1')
    self.assertEqual(dist_1['config'], new_distributor_configs['dist-1'])

    dist_2 = distributor_manager.get_distributor('repo-1', 'dist-2')
    self.assertEqual(dist_2['config'], {'key-d2': 'orig-2'})

    # There should have been a spawned task for the new distributor config
    expected_task_id = dispatch.TaskStatus.get_collection().find_one(
        {'tags': 'pulp:repository_distributor:dist-1'})['task_id']
    self.assertEqual(result.spawned_tasks, [{'task_id': expected_task_id}])
def GET(self, id):
    """
    Looks for query parameters 'importers' and 'distributors', and will add the corresponding
    fields to the repository returned. Query parameter 'details' is equivalent to passing
    both 'importers' and 'distributors'.
    """
    query_params = web.input()
    query_manager = manager_factory.repo_query_manager()
    repo = query_manager.find_by_id(id)

    if repo is None:
        raise exceptions.MissingResource(id)

    repo.update(serialization.link.current_link_obj())

    if query_params.get('details', False):
        query_params['importers'] = True
        query_params['distributors'] = True

    if query_params.get('importers', False):
        repo = _merge_related_objects(
            'importers', manager_factory.repo_importer_manager(), (repo,))[0]
    if query_params.get('distributors', False):
        repo = _merge_related_objects(
            'distributors', manager_factory.repo_distributor_manager(), (repo,))[0]

    return self.ok(repo)
def test_post(self, _reserve_resource, mock_apply_async):
    # Setup
    task_id = str(uuid.uuid4())
    mock_apply_async.return_value = AsyncResult(task_id)
    _reserve_resource.return_value = ReservedResourceApplyAsync()
    upload_id = self.upload_manager.initialize_upload()
    self.upload_manager.save_data(upload_id, 0, 'string data')

    repo_manager = manager_factory.repo_manager()
    repo_manager.create_repo('repo-upload')
    importer_manager = manager_factory.repo_importer_manager()
    importer_manager.set_importer('repo-upload', 'dummy-importer', {})

    # Test
    body = {
        'upload_id': upload_id,
        'unit_type_id': 'dummy-type',
        'unit_key': {'name': 'foo'},
        'unit_metadata': {'stuff': 'bar'},
    }
    status, body = self.post('/v2/repositories/repo-upload/actions/import_upload/', body)

    # Verify
    self.assertEqual(202, status)
    assert_body_matches_async_task(body, mock_apply_async.return_value)
    expected_call_args = ['repo-upload', 'dummy-type', {'name': 'foo'},
                          {'stuff': 'bar'}, upload_id]
    self.assertEqual(expected_call_args, mock_apply_async.call_args[0][0])
def remove_from_importer(repo_id, transfer_units):
    # Retrieve the repo from the database and convert to the transfer repo
    repo_query_manager = manager_factory.repo_query_manager()
    repo = repo_query_manager.get_repository(repo_id)

    importer_manager = manager_factory.repo_importer_manager()
    repo_importer = importer_manager.get_importer(repo_id)

    transfer_repo = common_utils.to_transfer_repo(repo)
    transfer_repo.working_dir = common_utils.importer_working_dir(
        repo_importer['importer_type_id'], repo_id, mkdir=True)

    # Retrieve the plugin instance to invoke
    importer_instance, plugin_config = plugin_api.get_importer_by_id(
        repo_importer['importer_type_id'])
    call_config = PluginCallConfiguration(plugin_config, repo_importer['config'])

    # Invoke the importer's remove method
    try:
        importer_instance.remove_units(transfer_repo, transfer_units, call_config)
    except Exception:
        msg = _('Exception from importer [%(i)s] while removing units from repo [%(r)s]')
        msg = msg % {'i': repo_importer['id'], 'r': repo_id}
        logger.exception(msg)
def _process_repos(repos, importers=False, distributors=False):
    """
    Apply standard processing to a collection of repositories being returned to a client.
    Adds the object link and optionally adds related importers and distributors.

    @param repos: collection of repositories
    @type repos: list, tuple
    @param importers: iff True, adds related importers under the attribute "importers".
    @type importers: bool
    @param distributors: iff True, adds related distributors under the attribute "distributors".
    @type distributors: bool
    @return: the same list that was passed in, just for convenience. The list itself is not
             modified; only its members are modified in-place.
    @rtype: list of Repo instances
    """
    if importers:
        _merge_related_objects(
            'importers', manager_factory.repo_importer_manager(), repos)
    if distributors:
        _merge_related_objects(
            'distributors', manager_factory.repo_distributor_manager(), repos)

    for repo in repos:
        repo.update(serialization.link.search_safe_link_obj(repo['id']))

    return repos
def delete(repo_id):
    """
    Delete a repository and inform other affected collections.

    :param repo_id: id of the repository to delete.
    :type repo_id: str
    :raise pulp_exceptions.PulpExecutionException: if any part of the process fails; the
           exception will contain information on which sections failed
    :return: A TaskResult object with the details of any errors or spawned tasks
    :rtype: pulp.server.async.tasks.TaskResult
    """
    # With so much going on during a delete, it's possible that a few things could go wrong
    # while others are successful. We track lesser errors that shouldn't abort the entire
    # process until the end and then raise an exception describing the incompleteness of the
    # delete. The exception arguments are captured as the second element in the tuple, but
    # the user will have to look at the server logs for more information.
    error_tuples = []  # tuple of failed step and exception arguments

    importer_manager = manager_factory.repo_importer_manager()
    distributor_manager = manager_factory.repo_distributor_manager()

    # Inform the importer
    importer_coll = RepoImporter.get_collection()
    repo_importer = importer_coll.find_one({'repo_id': repo_id})
    if repo_importer is not None:
        try:
            importer_manager.remove_importer(repo_id)
        except Exception, e:
            _logger.exception('Error received removing importer [%s] from repo [%s]' % (
                repo_importer['importer_type_id'], repo_id))
            error_tuples.append(e)
def test_delete_with_plugin_error(self):
    """
    Tests deleting a repo where one (or more) of the plugins raises an error.
    """
    # Setup
    self.manager.create_repo('doomed')

    importer_manager = manager_factory.repo_importer_manager()
    distributor_manager = manager_factory.repo_distributor_manager()

    importer_manager.set_importer('doomed', 'mock-importer', {})
    distributor_manager.add_distributor('doomed', 'mock-distributor', {}, True,
                                        distributor_id='dist-1')

    # Setup both mocks to raise errors on removal
    mock_plugins.MOCK_IMPORTER.importer_removed.side_effect = Exception('Splat')
    mock_plugins.MOCK_DISTRIBUTOR.distributor_removed.side_effect = Exception('Pow')

    # Test
    try:
        self.manager.delete_repo('doomed')
        self.fail('No exception raised during repo delete')
    except exceptions.PulpExecutionException, e:
        pass
def PUT(self, repo_id, importer_id):
    # Params (validation will occur in the manager)
    params = self.params()
    importer_config = params.get('importer_config', None)

    if importer_config is None:
        _LOG.error('Missing configuration updating importer for repository [%s]' % repo_id)
        raise exceptions.MissingValue(['importer_config'])

    importer_manager = manager_factory.repo_importer_manager()
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id),
            action_tag('update_importer')]
    call_request = CallRequest(importer_manager.update_importer_config,
                               [repo_id],
                               {'importer_config': importer_config},
                               tags=tags,
                               archive=True,
                               kwarg_blacklist=['importer_config'])
    call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE,
                                  importer_id)
    result = execution.execute(call_request)
    return self.ok(result)
def test_delete_with_plugin_error(self):
    """
    Tests deleting a repo where one (or more) of the plugins raises an error.
    """
    # Setup
    self.manager.create_repo('doomed')

    importer_manager = manager_factory.repo_importer_manager()
    distributor_manager = manager_factory.repo_distributor_manager()

    importer_manager.set_importer('doomed', 'mock-importer', {})
    distributor_manager.add_distributor('doomed', 'mock-distributor', {}, True,
                                        distributor_id='dist-1')

    # Setup both mocks to raise errors on removal
    mock_plugins.MOCK_IMPORTER.importer_removed.side_effect = Exception('Splat')
    mock_plugins.MOCK_DISTRIBUTOR.distributor_removed.side_effect = Exception('Pow')

    # Test
    try:
        self.manager.delete_repo('doomed')
        self.fail('No exception raised during repo delete')
    except exceptions.PulpExecutionException:
        pass

    # Cleanup - need to manually clear the side effects
    mock_plugins.MOCK_IMPORTER.importer_removed.side_effect = None
    mock_plugins.MOCK_DISTRIBUTOR.distributor_removed.side_effect = None
def populate(self, strategy=constants.DEFAULT_STRATEGY, ssl=False):
    PluginTestBase.populate(self)

    # register child
    manager = managers.consumer_manager()
    manager.register(self.PULP_ID, notes={constants.STRATEGY_NOTE_KEY: strategy})

    # add importer
    manager = managers.repo_importer_manager()
    importer_conf = {
        constants.MANIFEST_URL_KEYWORD: 'http://redhat.com',
        constants.STRATEGY_KEYWORD: constants.DEFAULT_STRATEGY,
        constants.PROTOCOL_KEYWORD: 'file',
    }
    manager.set_importer(self.REPO_ID, constants.HTTP_IMPORTER, importer_conf)

    # add distributors
    if ssl:
        dist_conf = self.dist_conf_with_ssl()
    else:
        dist_conf = self.dist_conf()
    manager = managers.repo_distributor_manager()
    manager.add_distributor(
        self.REPO_ID, constants.HTTP_DISTRIBUTOR, dist_conf, False, constants.HTTP_DISTRIBUTOR)
    manager.add_distributor(self.REPO_ID, FAKE_DISTRIBUTOR, {}, False, FAKE_DISTRIBUTOR)

    # bind
    conf = {constants.STRATEGY_KEYWORD: strategy}
    manager = managers.consumer_bind_manager()
    manager.bind(self.PULP_ID, self.REPO_ID, constants.HTTP_DISTRIBUTOR, False, conf)
def test_delete_with_plugins(self):
    """
    Tests that deleting a repo that has importers and distributors configured deletes them
    as well.
    """
    # Setup
    self.manager.create_repo('doomed')

    importer_manager = manager_factory.repo_importer_manager()
    distributor_manager = manager_factory.repo_distributor_manager()

    importer_manager.set_importer('doomed', 'mock-importer', {})
    distributor_manager.add_distributor('doomed', 'mock-distributor', {}, True,
                                        distributor_id='dist-1')
    distributor_manager.add_distributor('doomed', 'mock-distributor', {}, True,
                                        distributor_id='dist-2')

    self.assertEqual(1, len(list(RepoImporter.get_collection().find({'repo_id': 'doomed'}))))
    self.assertEqual(2, len(list(RepoDistributor.get_collection().find({'repo_id': 'doomed'}))))

    # Test
    self.manager.delete_repo('doomed')

    # Verify
    self.assertEqual(0, len(list(Repo.get_collection().find())))
    self.assertEqual(0, len(list(RepoImporter.get_collection().find({'repo_id': 'doomed'}))))
    self.assertEqual(0, len(list(RepoDistributor.get_collection().find({'repo_id': 'doomed'}))))

    self.assertEqual(1, mock_plugins.MOCK_IMPORTER.importer_removed.call_count)
    self.assertEqual(2, mock_plugins.MOCK_DISTRIBUTOR.distributor_removed.call_count)

    repo_working_dir = common_utils.repository_working_dir('doomed', mkdir=False)
    self.assertTrue(not os.path.exists(repo_working_dir))
def test_update_repo_and_plugins_partial(self):
    """
    Tests no errors are encountered when only updating some of the possible fields.
    """
    # Setup
    self.manager.create_repo('repo-1', 'Original', 'Original Description')

    importer_manager = manager_factory.repo_importer_manager()
    distributor_manager = manager_factory.repo_distributor_manager()

    importer_manager.set_importer('repo-1', 'mock-importer', {'key-i1': 'orig-1'})
    distributor_manager.add_distributor('repo-1', 'mock-distributor', {'key-d1': 'orig-1'},
                                        True, distributor_id='dist-1')

    # Test
    repo = self.manager.update_repo_and_plugins('repo-1', None, None, None)

    # Verify
    self.assertEqual(repo['display_name'], 'Original')

    importer = importer_manager.get_importer('repo-1')
    self.assertEqual(importer['config'], {'key-i1': 'orig-1'})

    dist_1 = distributor_manager.get_distributor('repo-1', 'dist-1')
    self.assertEqual(dist_1['config'], {'key-d1': 'orig-1'})
def test_update_repo_and_plugins_partial(self):
    """
    Tests no errors are encountered when only updating some of the possible fields.
    """
    # Setup
    self.manager.create_repo('repo-1', 'Original', 'Original Description')

    importer_manager = manager_factory.repo_importer_manager()
    distributor_manager = manager_factory.repo_distributor_manager()

    importer_manager.set_importer('repo-1', 'mock-importer', {'key-i1': 'orig-1'})
    distributor_manager.add_distributor('repo-1', 'mock-distributor', {'key-d1': 'orig-1'},
                                        True, distributor_id='dist-1')

    # Test
    result = self.manager.update_repo_and_plugins('repo-1', None, None, None)
    repo = result.return_value

    # Verify
    self.assertEqual(repo['display_name'], 'Original')

    importer = importer_manager.get_importer('repo-1')
    self.assertEqual(importer['config'], {'key-i1': 'orig-1'})

    dist_1 = distributor_manager.get_distributor('repo-1', 'dist-1')
    self.assertEqual(dist_1['config'], {'key-d1': 'orig-1'})
def DELETE(self, repo_id, importer_id, schedule_id):
    importer_manager = manager_factory.repo_importer_manager()
    schedule_list = importer_manager.list_sync_schedules(repo_id)
    if schedule_id not in schedule_list:
        raise exceptions.MissingResource(repo=repo_id, importer=importer_id,
                                         publish_schedule=schedule_id)

    schedule_manager = manager_factory.schedule_manager()
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id),
            resource_tag(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id),
            action_tag('delete_sync_schedule')]
    call_request = CallRequest(schedule_manager.delete_sync_schedule,
                               [repo_id, importer_id, schedule_id],
                               tags=tags,
                               archive=True)
    call_request.reads_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE,
                                  importer_id)
    call_request.deletes_resource(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id)
    result = execution.execute(call_request)
    return self.ok(result)
def create_sync_schedule(self, repo_id, importer_id, sync_options, schedule_data):
    """
    Create a new sync schedule for a given repository using the given importer.

    @param repo_id: identifies the repository the sync schedule is for
    @param importer_id: identifies the importer that will perform the scheduled sync
    @param sync_options: sync options, including the override_config passed to the importer
    @param schedule_data: scheduling details; must contain a 'schedule' entry
    @return: ID of the newly created schedule
    """
    # validate the input
    self._validate_importer(repo_id, importer_id)
    self._validate_keys(sync_options, _SYNC_OPTION_KEYS)
    if 'schedule' not in schedule_data:
        raise pulp_exceptions.MissingValue(['schedule'])

    # build the sync call request
    sync_manager = managers_factory.repo_sync_manager()
    args = [repo_id]
    kwargs = {'sync_config_override': sync_options['override_config']}
    weight = pulp_config.config.getint('tasks', 'sync_weight')
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id)]
    call_request = CallRequest(sync_manager.sync, args, kwargs, weight=weight, tags=tags,
                               archive=True)
    call_request.reads_resource(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE,
                                importer_id)
    call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    call_request.add_life_cycle_callback(dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK,
                                         sync_manager.prep_sync)

    # schedule the sync
    scheduler = dispatch_factory.scheduler()
    schedule_id = scheduler.add(call_request, **schedule_data)
    importer_manager = managers_factory.repo_importer_manager()
    importer_manager.add_sync_schedule(repo_id, schedule_id)
    return schedule_id
def setUp(self):
    base.PulpServerTests.setUp(self)
    mock_plugins.install()

    self.upload_manager = manager_factory.content_upload_manager()
    self.repo_manager = manager_factory.repo_manager()
    self.importer_manager = manager_factory.repo_importer_manager()
def verify(self, num_units=PluginTestBase.NUM_UNITS):
    # repository
    manager = managers.repo_query_manager()
    manager.get_repository(self.REPO_ID)

    # importer
    manager = managers.repo_importer_manager()
    importer = manager.get_importer(self.REPO_ID)
    manifest_url = importer['config'][constants.MANIFEST_URL_KEYWORD]
    self.assertTrue(manifest_url.endswith('%s/manifest.json.gz' % self.REPO_ID))

    # distributor
    manager = managers.repo_distributor_manager()
    manager.get_distributor(self.REPO_ID, FAKE_DISTRIBUTOR)
    self.assertRaises(MissingResource, manager.get_distributor, self.REPO_ID,
                      constants.HTTP_DISTRIBUTOR)

    # check units
    manager = managers.repo_unit_association_query_manager()
    units = manager.get_units(self.REPO_ID)
    units = dict([(u['metadata']['N'], u) for u in units])
    self.assertEqual(len(units), num_units)
    for n in range(0, num_units):
        unit = units[n]
        unit_id = self.UNIT_ID % n
        metadata = unit['metadata']
        storage_path = metadata['_storage_path'].replace('//', '/')
        self.assertEqual(unit['unit_type_id'], self.UNIT_TYPE_ID)
        self.assertEqual(unit['repo_id'], self.REPO_ID)
        self.assertEqual(unit['owner_id'], constants.HTTP_IMPORTER)
        file_path = '.'.join((unit_id, self.UNIT_TYPE_ID))
        self.assertEqual(storage_path, os.path.join(self.childfs, 'content', file_path))
        self.assertTrue(os.path.exists(storage_path))
        fp = open(storage_path)
        content = fp.read()
        fp.close()
        self.assertEqual(content, unit_id)
def GET(self, id):
    """
    Looks for query parameters 'importers' and 'distributors', and will add the corresponding
    fields to the repository returned. Query parameter 'details' is equivalent to passing
    both 'importers' and 'distributors'.
    """
    query_params = web.input()
    query_manager = manager_factory.repo_query_manager()
    repo = query_manager.find_by_id(id)

    if repo is None:
        raise exceptions.MissingResource(repo=id)

    repo.update(serialization.link.current_link_obj())
    _convert_repo_dates_to_strings(repo)

    if query_params.get('details', False):
        query_params['importers'] = True
        query_params['distributors'] = True

    if query_params.get('importers', False):
        repo = _merge_related_objects(
            'importers', manager_factory.repo_importer_manager(), (repo,))[0]
    if query_params.get('distributors', False):
        repo = _merge_related_objects(
            'distributors', manager_factory.repo_distributor_manager(), (repo,))[0]

    return self.ok(repo)
def POST(self, repo_id):
    # Params (validation will occur in the manager)
    params = self.params()
    importer_type = params.get('importer_type_id', None)
    importer_config = params.get('importer_config', None)

    if importer_type is None:
        _LOG.exception('Missing importer type adding importer to repository [%s]' % repo_id)
        raise exceptions.MissingValue(['importer_type'])

    # Note: If an importer exists, it's removed, so no need to handle 409s.
    # Note: If the plugin raises an exception during initialization, let it
    # bubble up and be handled like any other 500.

    importer_manager = manager_factory.repo_importer_manager()
    resources = {dispatch_constants.RESOURCE_REPOSITORY_TYPE:
                 {repo_id: dispatch_constants.RESOURCE_UPDATE_OPERATION}}
    weight = pulp_config.config.getint('tasks', 'create_weight')
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            action_tag('add_importer')]
    call_request = CallRequest(importer_manager.set_importer,
                               [repo_id, importer_type, importer_config],
                               resources=resources,
                               weight=weight,
                               tags=tags)
    return execution.execute_sync_created(self, call_request, 'importer')
def PUT(self, repo_id, importer_id, schedule_id):
    importer_manager = manager_factory.repo_importer_manager()
    schedule_list = importer_manager.list_sync_schedules(repo_id)
    if schedule_id not in schedule_list:
        raise exceptions.MissingResource(repo=repo_id, importer=importer_id,
                                         publish_schedule=schedule_id)

    sync_updates = {}
    schedule_updates = self.params()
    if 'override_config' in schedule_updates:
        sync_updates['override_config'] = schedule_updates.pop('override_config')

    schedule_manager = manager_factory.schedule_manager()
    resources = {dispatch_constants.RESOURCE_REPOSITORY_TYPE:
                 {repo_id: dispatch_constants.RESOURCE_READ_OPERATION},
                 dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE:
                 {importer_id: dispatch_constants.RESOURCE_READ_OPERATION},
                 dispatch_constants.RESOURCE_SCHEDULE_TYPE:
                 {schedule_id: dispatch_constants.RESOURCE_UPDATE_OPERATION}}
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id),
            resource_tag(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id),
            action_tag('update_sync_schedule')]
    call_request = CallRequest(schedule_manager.update_sync_schedule,
                               [repo_id, importer_id, schedule_id, sync_updates,
                                schedule_updates],
                               resources=resources,
                               tags=tags,
                               archive=True)
    execution.execute(call_request)

    scheduler = dispatch_factory.scheduler()
    schedule = scheduler.get(schedule_id)
    obj = serialization.dispatch.scheduled_sync_obj(schedule)
    obj.update(serialization.link.current_link_obj())
    return self.ok(obj)
def is_valid_upload(repo_id, unit_type_id):
    """
    Checks that the repository is configured to handle an upload request for the given unit
    type ID. This should be called prior to beginning the upload to prevent a wasted effort
    in the bits uploading.

    :param repo_id: identifies the repo into which the unit is being uploaded
    :param unit_type_id: type of unit being uploaded
    :return: true if the repository can attempt to handle the unit
    :rtype: bool
    :raise MissingResource: if the repository or its importer do not exist
    """
    importer_manager = manager_factory.repo_importer_manager()

    # Will raise an appropriate exception if it cannot be found
    repo_importer = importer_manager.get_importer(repo_id)

    # Make sure the importer on the repo can support the indicated type
    importer_types = plugin_api.list_importer_types(repo_importer['importer_type_id'])['types']

    if unit_type_id not in importer_types:
        raise PulpDataException('Invalid unit type for repository')

    return True
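# Illustrative only: how a caller might guard an upload with the check above. The helper name,
# repo id, and unit type are hypothetical; the content_upload_manager()/initialize_upload()
# pairing is assumed from the test setup snippets elsewhere in this collection.
from pulp.server.managers import factory as manager_factory

def start_upload_if_supported(repo_id, unit_type_id):
    # Raises MissingResource or PulpDataException if the repo/importer cannot take this type
    is_valid_upload(repo_id, unit_type_id)
    upload_manager = manager_factory.content_upload_manager()
    return upload_manager.initialize_upload()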
def __init__(self, source_repo_id, dest_repo_id, source_importer_id, dest_importer_id,
             association_owner_type, association_owner_id):
    """
    :param source_repo_id: ID of the repository from which units are being copied
    :type source_repo_id: str
    :param dest_repo_id: ID of the repository into which units are being copied
    :type dest_repo_id: str
    :param source_importer_id: ID of the importer on the source repository
    :type source_importer_id: str
    :param dest_importer_id: ID of the importer on the destination repository
    :type dest_importer_id: str
    :param association_owner_type: distinguishes the owner when creating an association
           through this conduit
    :type association_owner_type: str
    :param association_owner_id: specific ID of the owner when creating an association
           through this conduit
    :type association_owner_id: str
    """
    ImporterScratchPadMixin.__init__(self, dest_repo_id, dest_importer_id)
    RepoScratchPadMixin.__init__(self, dest_repo_id, ImporterConduitException)
    SearchUnitsMixin.__init__(self, ImporterConduitException)
    AddUnitMixin.__init__(self, dest_repo_id, dest_importer_id, association_owner_type,
                          association_owner_id)

    self.source_repo_id = source_repo_id
    self.dest_repo_id = dest_repo_id
    self.source_importer_id = source_importer_id
    self.dest_importer_id = dest_importer_id
    self.association_owner_type = association_owner_type
    self.association_owner_id = association_owner_id

    self.__association_manager = manager_factory.repo_unit_association_manager()
    self.__association_query_manager = manager_factory.repo_unit_association_query_manager()
    self.__importer_manager = manager_factory.repo_importer_manager()
def test_post_with_override_config(self, mock_get_worker_for_reservation, mock_uuid,
                                   mock_apply_async):
    # Setup
    uuid_list = [uuid.uuid4() for i in range(10)]
    mock_uuid.uuid4.side_effect = copy.deepcopy(uuid_list)
    expected_async_result = AsyncResult(str(uuid_list[0]))
    mock_get_worker_for_reservation.return_value = Worker('some_queue', datetime.datetime.now())
    upload_id = self.upload_manager.initialize_upload()
    self.upload_manager.save_data(upload_id, 0, 'string data')

    repo_manager = manager_factory.repo_manager()
    repo_manager.create_repo('repo-upload')
    importer_manager = manager_factory.repo_importer_manager()
    importer_manager.set_importer('repo-upload', 'dummy-importer', {})

    # Test
    test_override_config = {'key1': 'value1', 'key2': 'value2'}
    body = {
        'upload_id': upload_id,
        'unit_type_id': 'dummy-type',
        'unit_key': {'name': 'foo'},
        'unit_metadata': {'stuff': 'bar'},
        'override_config': test_override_config,
    }
    status, body = self.post('/v2/repositories/repo-upload/actions/import_upload/', body)

    # Verify
    self.assertEqual(202, status)
    assert_body_matches_async_task(body, expected_async_result)
    expected_call_args = ['repo-upload', 'dummy-type', {'name': 'foo'},
                          {'stuff': 'bar'}, upload_id, test_override_config]
    self.assertEqual(expected_call_args, mock_apply_async.call_args[0][0])
def remove_from_importer(repo_id, removed_units):
    # Retrieve the repo from the database and convert to the transfer repo
    repo_query_manager = manager_factory.repo_query_manager()
    repo = repo_query_manager.get_repository(repo_id)

    importer_manager = manager_factory.repo_importer_manager()
    repo_importer = importer_manager.get_importer(repo_id)

    transfer_repo = common_utils.to_transfer_repo(repo)
    transfer_repo.working_dir = common_utils.importer_working_dir(
        repo_importer['importer_type_id'], repo_id, mkdir=True)

    # Convert the units into transfer units
    unit_type_ids = calculate_associated_type_ids(repo_id, removed_units)
    transfer_units = create_transfer_units(removed_units, unit_type_ids)

    # Retrieve the plugin instance to invoke
    importer_instance, plugin_config = plugin_api.get_importer_by_id(
        repo_importer['importer_type_id'])
    call_config = PluginCallConfiguration(plugin_config, repo_importer['config'])

    # Invoke the importer's remove method
    try:
        importer_instance.remove_units(transfer_repo, transfer_units, call_config)
    except Exception:
        _LOG.exception('Exception from importer [%s] while removing units from repo [%s]' %
                       (repo_importer['id'], repo_id))
def PUT(self, repo_id, importer_id):
    # Params (validation will occur in the manager)
    params = self.params()
    importer_config = params.get('importer_config', None)

    if importer_config is None:
        _LOG.error('Missing configuration updating importer for repository [%s]' % repo_id)
        raise exceptions.MissingValue(['importer_config'])

    importer_manager = manager_factory.repo_importer_manager()
    resources = {dispatch_constants.RESOURCE_REPOSITORY_TYPE:
                 {repo_id: dispatch_constants.RESOURCE_UPDATE_OPERATION},
                 dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE:
                 {importer_id: dispatch_constants.RESOURCE_UPDATE_OPERATION}}
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id),
            action_tag('update_importer')]
    call_request = CallRequest(importer_manager.update_importer_config,
                               [repo_id],
                               {'importer_config': importer_config},
                               resources=resources,
                               tags=tags,
                               archive=True,
                               kwarg_blacklist=['importer_config'])
    result = execution.execute(call_request)
    return self.ok(result)
def sync(self, repo_id, sync_config_override=None):
    """
    Performs a synchronize operation on the given repository.

    The given repo must have an importer configured. The identity of the importer is not a
    parameter to this call; if multiple importers are eventually supported this will have to
    change to indicate which importer to use.

    This method is intentionally limited to synchronizing a single repo. Performing multiple
    repository syncs concurrently will require a more global view of the server and must be
    handled outside the scope of this class.

    @param repo_id: identifies the repo to sync
    @type repo_id: str

    @param sync_config_override: optional config containing values to use for this sync only
    @type sync_config_override: dict

    @raise MissingResource: if repo_id does not refer to a valid repo
    @raise OperationFailed: if the given repo does not have an importer set
    """
    repo_coll = Repo.get_collection()

    # Validation
    repo = repo_coll.find_one({'id': repo_id})
    if repo is None:
        raise MissingResource(repo_id)

    importer_instance, importer_config = self._get_importer_instance_and_config(repo_id)

    if importer_instance is None:
        raise MissingResource(repo_id)

    dispatch_context = dispatch_factory.context()
    dispatch_context.set_cancel_control_hook(importer_instance.cancel_sync_repo)

    importer_manager = manager_factory.repo_importer_manager()
    repo_importer = importer_manager.get_importer(repo_id)

    # Assemble the data needed for the sync
    conduit = RepoSyncConduit(repo_id, repo_importer['id'],
                              RepoContentUnit.OWNER_TYPE_IMPORTER, repo_importer['id'])

    call_config = PluginCallConfiguration(importer_config, repo_importer['config'],
                                          sync_config_override)
    transfer_repo = common_utils.to_transfer_repo(repo)
    transfer_repo.working_dir = common_utils.importer_working_dir(
        repo_importer['importer_type_id'], repo_id, mkdir=True)

    # Fire events around the call
    fire_manager = manager_factory.event_fire_manager()
    fire_manager.fire_repo_sync_started(repo_id)
    sync_result = self._do_sync(repo, importer_instance, transfer_repo, conduit, call_config)
    fire_manager.fire_repo_sync_finished(sync_result)

    dispatch_context.clear_cancel_control_hook()

    if sync_result['result'] == RepoSyncResult.RESULT_FAILED:
        raise PulpExecutionException(_('Importer indicated a failed response'))
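# Illustrative only: invoking the sync manager directly with a one-off override config, per the
# sync_config_override parameter documented above. The factory accessor name repo_sync_manager()
# and the 'num_threads' override key are assumptions; real override keys depend on the importer
# plugin in use, and the repo id is hypothetical.
from pulp.server.managers import factory as manager_factory

sync_manager = manager_factory.repo_sync_manager()
sync_manager.sync('my-repo', sync_config_override={'num_threads': 2})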
def is_valid_upload(self, repo_id, unit_type_id):
    """
    Checks that the repository is configured to handle an upload request for the given unit
    type ID. This should be called prior to beginning the upload to prevent a wasted effort
    in the bits uploading.

    @param repo_id: identifies the repo into which the unit is being uploaded
    @param unit_type_id: type of unit being uploaded

    @return: true if the repository can attempt to handle the unit
    @rtype: bool

    @raise MissingResource: if the repository or its importer do not exist
    """
    importer_manager = manager_factory.repo_importer_manager()

    # Will raise an appropriate exception if it cannot be found
    repo_importer = importer_manager.get_importer(repo_id)

    # Make sure the importer on the repo can support the indicated type
    importer_types = plugin_api.list_importer_types(repo_importer['importer_type_id'])['types']

    if unit_type_id not in importer_types:
        raise PulpDataException('Invalid unit type for repository')

    return True
def POST(self, repo_id, importer_id):
    importer_manager = manager_factory.repo_importer_manager()
    importer = importer_manager.get_importer(repo_id)
    if importer_id != importer['id']:
        raise exceptions.MissingResource(importer=importer_id)

    schedule_options = self.params()
    sync_options = {'override_config': schedule_options.pop('override_config', {})}

    schedule_manager = manager_factory.schedule_manager()
    resources = {dispatch_constants.RESOURCE_REPOSITORY_TYPE:
                 {repo_id: dispatch_constants.RESOURCE_READ_OPERATION},
                 dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE:
                 {importer_id: dispatch_constants.RESOURCE_UPDATE_OPERATION}}
    weight = pulp_config.config.getint('tasks', 'create_weight')
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id),
            action_tag('create_sync_schedule')]
    call_request = CallRequest(schedule_manager.create_sync_schedule,
                               [repo_id, importer_id, sync_options, schedule_options],
                               resources=resources,
                               weight=weight,
                               tags=tags,
                               archive=True)
    schedule_id = execution.execute_sync(call_request)

    scheduler = dispatch_factory.scheduler()
    schedule = scheduler.get(schedule_id)
    obj = serialization.dispatch.scheduled_sync_obj(schedule)
    obj.update(serialization.link.child_link_obj(schedule_id))
    return self.created(obj['_href'], obj)
def update_repo_and_plugins(self, repo_id, repo_delta, importer_config, distributor_configs):
    """
    Aggregate method that will update one or more of the following:
    * Repository metadata
    * Importer config
    * Zero or more distributors on the repository

    All of the above pieces do not need to be specified. If a piece is omitted its
    configuration is not touched, nor is it removed from the repository. The same holds true
    for the distributor_configs dict: not every distributor must be represented.

    This call will attempt the updates in the order listed above. If an exception occurs
    during any of these steps, the updates stop and the exception is immediately raised.
    Any updates that have already taken place are not rolled back.

    This call will call out to RepoImporterManager.update_importer_config and
    RepoDistributorManager.update_distributor_config. Documentation for those methods,
    especially possible exceptions, should be consulted for more information.

    :param repo_id: unique identifier for the repo
    :type repo_id: str
    :param repo_delta: list of attributes and their new values to change; if None, no attempt
                       to update the repo's metadata will be made
    :type repo_delta: dict, None
    :param importer_config: new configuration to use for the repo's importer; if None, no
                            attempt will be made to update the importer
    :type importer_config: dict, None
    :param distributor_configs: mapping of distributor ID to the new configuration to set
                                for it
    :type distributor_configs: dict, None
    :return: updated repository object, same as returned from update_repo
    """
    # Repo Update
    if repo_delta is None:
        repo_delta = {}
    repo = self.update_repo(repo_id, repo_delta)

    # Importer Update
    if importer_config is not None:
        importer_manager = manager_factory.repo_importer_manager()
        importer_manager.update_importer_config(repo_id, importer_config)

    # Distributor Update
    if distributor_configs is not None:
        distributor_manager = manager_factory.repo_distributor_manager()
        for dist_id, dist_config in distributor_configs.items():
            distributor_manager.update_distributor_config(repo_id, dist_id, dist_config)

    return repo
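# Illustrative only: a partial aggregate update along the lines of the tests elsewhere in this
# collection, which call update_repo_and_plugins on the same manager that creates repos. The
# repo id, display name, distributor id, and config key below are hypothetical.
from pulp.server.managers import factory as manager_factory

repo_manager = manager_factory.repo_manager()
repo_manager.create_repo('example-repo')
repo_manager.update_repo_and_plugins(
    'example-repo',
    {'display_name': 'Example'},           # repo_delta: metadata changes only
    None,                                   # importer_config: leave the importer untouched
    {'dist-1': {'relative_url': 'ex/'}})    # distributor_configs: update one distributor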
def resolve_dependencies_by_units(repo_id, units, options):
    """
    Calculates dependencies for the given set of units in the given repository.

    :param repo_id: identifies the repository
    :type repo_id: str
    :param units: list of database representations of units to resolve dependencies for
    :type units: list
    :param options: dict of options to pass the importer to drive the resolution
    :type options: dict or None
    :return: report from the plugin
    :rtype: object
    :raise MissingResource: if the repo does not exist or does not have an importer
    """
    # Validation
    repo_query_manager = manager_factory.repo_query_manager()
    importer_manager = manager_factory.repo_importer_manager()

    # The following will raise MissingResource as appropriate
    repo = repo_query_manager.get_repository(repo_id)
    repo_importer = importer_manager.get_importer(repo_id)

    try:
        importer_instance, plugin_config = plugin_api.get_importer_by_id(
            repo_importer['importer_type_id'])
    except plugin_exceptions.PluginNotFound:
        raise MissingResource(repo_id), None, sys.exc_info()[2]

    # Package for the importer call
    call_config = PluginCallConfiguration(plugin_config, repo_importer['config'], options)
    transfer_repo = common_utils.to_transfer_repo(repo)
    transfer_repo.working_dir = common_utils.importer_working_dir(
        repo_importer['importer_type_id'], repo_id, mkdir=True)
    conduit = DependencyResolutionConduit(repo_id, repo_importer['id'])

    # Convert all of the units into the plugin standard representation
    transfer_units = []

    # Preload all the type defs so we don't hammer the database unnecessarily
    type_defs = {}
    all_type_def_ids = set([u['unit_type_id'] for u in units])
    for def_id in all_type_def_ids:
        type_def = types_db.type_definition(def_id)
        type_defs[def_id] = type_def

    for unit in units:
        type_id = unit['unit_type_id']
        u = conduit_common_utils.to_plugin_associated_unit(unit, type_defs[type_id])
        transfer_units.append(u)

    # Invoke the importer
    try:
        dep_report = importer_instance.resolve_dependencies(transfer_repo, transfer_units,
                                                            conduit, call_config)
    except Exception, e:
        raise PulpExecutionException(), None, sys.exc_info()[2]
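# Illustrative only: resolving dependencies for a repo's units, in the spirit of the
# dependency_manager() fixture at the top of this collection. Whether the dependency manager
# exposes resolve_dependencies_by_units as a method is an assumption here, as are the repo id
# and the 'recursive' option key; the unit dicts come from the association query manager.
from pulp.server.managers import factory as manager_factory

dep_manager = manager_factory.dependency_manager()
assoc_query = manager_factory.repo_unit_association_query_manager()
units = assoc_query.get_units('dep-repo')  # database representations of the repo's units
report = dep_manager.resolve_dependencies_by_units('dep-repo', units, {'recursive': True})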
def setUp(self):
    super(RepoQueryManagerTests, self).setUp()
    mock_plugins.install()

    self.repo_manager = manager_factory.repo_manager()
    self.importer_manager = manager_factory.repo_importer_manager()
    self.distributor_manager = manager_factory.repo_distributor_manager()
    self.query_manager = manager_factory.repo_query_manager()
def _get_importer_instance_and_config(self, repo_id):
    importer_manager = manager_factory.repo_importer_manager()
    try:
        repo_importer = importer_manager.get_importer(repo_id)
        importer, config = plugin_api.get_importer_by_id(repo_importer['importer_type_id'])
    except (MissingResource, plugin_exceptions.PluginNotFound):
        importer = None
        config = None
    return importer, config
def GET(self, repo_id, importer_id):
    # importer_id is there to meet the REST requirement, so leave it there
    # despite it not being used in this method.
    importer_manager = manager_factory.repo_importer_manager()
    importer = importer_manager.get_importer(repo_id)
    return self.ok(importer)
def GET(self, repo_id, importer_id, schedule_id):
    importer_manager = manager_factory.repo_importer_manager()
    schedule_list = importer_manager.list_sync_schedules(repo_id)
    if schedule_id not in schedule_list:
        raise exceptions.MissingResource(repo=repo_id, importer=importer_id,
                                         sync_schedule=schedule_id)

    scheduler = dispatch_factory.scheduler()
    schedule = scheduler.get(schedule_id)

    obj = serialization.dispatch.scheduled_sync_obj(schedule)
    obj.update(serialization.link.current_link_obj())
    return self.ok(obj)
def delete_repo(self, repo_id):
    """
    Deletes the given repository, optionally requesting the associated
    importer clean up any content in the repository.

    :param repo_id: identifies the repo being deleted
    :type repo_id: str

    :raise MissingResource: if the given repo does not exist
    :raise OperationFailed: if any part of the delete process fails; the
           exception will contain information on which sections failed
    """
    # Validation
    found = Repo.get_collection().find_one({'id': repo_id})
    if found is None:
        raise MissingResource(repo_id)

    # With so much going on during a delete, it's possible that a few things
    # could go wrong while others are successful. We track lesser errors
    # that shouldn't abort the entire process until the end and then raise
    # an exception describing the incompleteness of the delete. The exception
    # arguments are captured as the second element in the tuple, but the user
    # will have to look at the server logs for more information.
    error_tuples = []  # list of (failed step, exception arguments) tuples

    # Remove any scheduled activities
    scheduler = dispatch_factory.scheduler()

    importer_manager = manager_factory.repo_importer_manager()
    importers = importer_manager.get_importers(repo_id)
    if importers:
        for schedule_id in importer_manager.list_sync_schedules(repo_id):
            scheduler.remove(schedule_id)

    distributor_manager = manager_factory.repo_distributor_manager()
    for distributor in distributor_manager.get_distributors(repo_id):
        for schedule_id in distributor_manager.list_publish_schedules(
                repo_id, distributor['id']):
            scheduler.remove(schedule_id)

    # Inform the importer
    importer_coll = RepoImporter.get_collection()
    repo_importer = importer_coll.find_one({'repo_id': repo_id})
    if repo_importer is not None:
        try:
            importer_manager.remove_importer(repo_id)
        except Exception, e:
            _LOG.exception(
                'Error received removing importer [%s] from repo [%s]' %
                (repo_importer['importer_type_id'], repo_id))
            error_tuples.append((_('Importer Delete Error'), e.args))
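The snippet above is cut off after the importer step. Purely to illustrate the error-aggregation pattern its comments describe (and not as the actual remainder of the method), a wrap-up along these lines would surface the collected failures once every step has been attempted; the exception type used here is an assumption borrowed from the resolver snippet earlier.

# Hypothetical wrap-up, illustrating the pattern only: after every delete
# step has been attempted, raise a single exception describing partial failures.
if error_tuples:
    raise PulpExecutionException(error_tuples)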
def test_update_repo_and_plugins(self):
    """
    Tests the aggregate call to update a repo and its plugins.
    """
    # Setup
    self.manager.create_repo('repo-1', 'Original', 'Original Description')

    importer_manager = manager_factory.repo_importer_manager()
    distributor_manager = manager_factory.repo_distributor_manager()

    importer_manager.set_importer('repo-1', 'mock-importer', {'key-i1': 'orig-1'})
    distributor_manager.add_distributor('repo-1', 'mock-distributor', {'key-d1': 'orig-1'},
                                        True, distributor_id='dist-1')
    distributor_manager.add_distributor('repo-1', 'mock-distributor', {'key-d2': 'orig-2'},
                                        True, distributor_id='dist-2')

    # Test
    repo_delta = {'display_name': 'Updated'}
    new_importer_config = {'key-i1': 'updated-1', 'key-i2': 'new-1'}
    new_distributor_configs = {
        'dist-1': {'key-d1': 'updated-1'},
    }  # only update one of the two distributors

    repo = self.manager.update_repo_and_plugins('repo-1', repo_delta, new_importer_config,
                                                new_distributor_configs)

    # Verify
    self.assertEqual(repo['id'], 'repo-1')
    self.assertEqual(repo['display_name'], 'Updated')
    self.assertEqual(repo['description'], 'Original Description')

    importer = importer_manager.get_importer('repo-1')
    self.assertEqual(importer['config'], new_importer_config)

    dist_1 = distributor_manager.get_distributor('repo-1', 'dist-1')
    self.assertEqual(dist_1['config'], new_distributor_configs['dist-1'])

    dist_2 = distributor_manager.get_distributor('repo-1', 'dist-2')
    self.assertEqual(dist_2['config'], {'key-d2': 'orig-2'})
def validate_importer(repo_id, importer_id):
    """
    Validate that the importer exists for the specified repo.

    :param repo_id: unique ID for a repository
    :type repo_id: basestring
    :param importer_id: unique ID for an importer
    :type importer_id: basestring

    :raise: pulp.server.exceptions.MissingResource
    """
    # Make sure the passed-in importer id matches the current importer on the repo.
    importer_manager = managers_factory.repo_importer_manager()
    importer = importer_manager.get_importer(repo_id)
    if importer_id != importer['id']:
        raise exceptions.MissingResource(importer=importer_id)
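A small hedged example of using validate_importer as a guard before doing importer-scoped work; the ids are illustrative only.

# Hypothetical guard: fail fast if the importer named in the request does not
# match the importer currently attached to the repo.
try:
    validate_importer('example-repo', 'example-importer')
except exceptions.MissingResource:
    # the repo has no importer, or a different importer is attached
    raise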
def PUT(self, repo_id, importer_id, schedule_id):
    importer_manager = manager_factory.repo_importer_manager()
    schedule_list = importer_manager.list_sync_schedules(repo_id)
    if schedule_id not in schedule_list:
        raise exceptions.MissingResource(repo=repo_id, importer=importer_id,
                                         sync_schedule=schedule_id)

    sync_updates = {}
    schedule_updates = self.params()
    if 'override_config' in schedule_updates:
        sync_updates['override_config'] = schedule_updates.pop('override_config')

    schedule_manager = manager_factory.schedule_manager()

    resources = {
        dispatch_constants.RESOURCE_REPOSITORY_TYPE: {
            repo_id: dispatch_constants.RESOURCE_READ_OPERATION},
        dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE: {
            importer_id: dispatch_constants.RESOURCE_READ_OPERATION},
        dispatch_constants.RESOURCE_SCHEDULE_TYPE: {
            schedule_id: dispatch_constants.RESOURCE_UPDATE_OPERATION}}
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id),
            resource_tag(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id),
            action_tag('update_sync_schedule')]
    call_request = CallRequest(schedule_manager.update_sync_schedule,
                               [repo_id, importer_id, schedule_id, sync_updates,
                                schedule_updates],
                               resources=resources,
                               tags=tags,
                               archive=True)
    execution.execute(call_request)

    scheduler = dispatch_factory.scheduler()
    schedule = scheduler.get(schedule_id)

    obj = serialization.dispatch.scheduled_sync_obj(schedule)
    obj.update(serialization.link.current_link_obj())
    return self.ok(obj)
def DELETE(self, repo_id, importer_id):
    importer_manager = manager_factory.repo_importer_manager()

    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id),
            action_tag('delete_importer')]
    call_request = CallRequest(importer_manager.remove_importer,
                               [repo_id],
                               tags=tags,
                               archive=True)
    call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    call_request.deletes_resource(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE,
                                  importer_id)
    result = execution.execute(call_request)
    return self.ok(result)